Home | History | Annotate | Download | only in CodeGen
      1 //===---- CGBuiltin.cpp - Emit LLVM Code for builtins ---------------------===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 // This contains code to emit Builtin calls as LLVM code.
     11 //
     12 //===----------------------------------------------------------------------===//
     13 
     14 #include "TargetInfo.h"
     15 #include "CodeGenFunction.h"
     16 #include "CodeGenModule.h"
     17 #include "CGObjCRuntime.h"
     18 #include "clang/Basic/TargetInfo.h"
     19 #include "clang/AST/ASTContext.h"
     20 #include "clang/AST/Decl.h"
     21 #include "clang/Basic/TargetBuiltins.h"
     22 #include "llvm/Intrinsics.h"
     23 #include "llvm/Target/TargetData.h"
     24 
     25 using namespace clang;
     26 using namespace CodeGen;
     27 using namespace llvm;
     28 
     29 /// getBuiltinLibFunction - Given a builtin id for a function like
     30 /// "__builtin_fabsf", return a Function* for "fabsf".
     31 llvm::Value *CodeGenModule::getBuiltinLibFunction(const FunctionDecl *FD,
     32                                                   unsigned BuiltinID) {
     33   assert(Context.BuiltinInfo.isLibFunction(BuiltinID));
     34 
     35   // Get the name, skip over the __builtin_ prefix (if necessary).
     36   StringRef Name;
     37   GlobalDecl D(FD);
     38 
     39   // If the builtin has been declared explicitly with an assembler label,
     40   // use the mangled name. This differs from the plain label on platforms
     41   // that prefix labels.
     42   if (FD->hasAttr<AsmLabelAttr>())
     43     Name = getMangledName(D);
     44   else
     45     Name = Context.BuiltinInfo.GetName(BuiltinID) + 10;
     46 
     47   llvm::FunctionType *Ty =
     48     cast<llvm::FunctionType>(getTypes().ConvertType(FD->getType()));
     49 
     50   return GetOrCreateLLVMFunction(Name, Ty, D, /*ForVTable=*/false);
     51 }
     52 
     53 /// Emit the conversions required to turn the given value into an
     54 /// integer of the given size.
     55 static Value *EmitToInt(CodeGenFunction &CGF, llvm::Value *V,
     56                         QualType T, llvm::IntegerType *IntType) {
     57   V = CGF.EmitToMemory(V, T);
     58 
     59   if (V->getType()->isPointerTy())
     60     return CGF.Builder.CreatePtrToInt(V, IntType);
     61 
     62   assert(V->getType() == IntType);
     63   return V;
     64 }
     65 
     66 static Value *EmitFromInt(CodeGenFunction &CGF, llvm::Value *V,
     67                           QualType T, llvm::Type *ResultType) {
     68   V = CGF.EmitFromMemory(V, T);
     69 
     70   if (ResultType->isPointerTy())
     71     return CGF.Builder.CreateIntToPtr(V, ResultType);
     72 
     73   assert(V->getType() == ResultType);
     74   return V;
     75 }
     76 
     77 /// Utility to insert an atomic instruction based on Instrinsic::ID
     78 /// and the expression node.
     79 static RValue EmitBinaryAtomic(CodeGenFunction &CGF,
     80                                llvm::AtomicRMWInst::BinOp Kind,
     81                                const CallExpr *E) {
     82   QualType T = E->getType();
     83   assert(E->getArg(0)->getType()->isPointerType());
     84   assert(CGF.getContext().hasSameUnqualifiedType(T,
     85                                   E->getArg(0)->getType()->getPointeeType()));
     86   assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));
     87 
     88   llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
     89   unsigned AddrSpace =
     90     cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
     91 
     92   llvm::IntegerType *IntType =
     93     llvm::IntegerType::get(CGF.getLLVMContext(),
     94                            CGF.getContext().getTypeSize(T));
     95   llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
     96 
     97   llvm::Value *Args[2];
     98   Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
     99   Args[1] = CGF.EmitScalarExpr(E->getArg(1));
    100   llvm::Type *ValueType = Args[1]->getType();
    101   Args[1] = EmitToInt(CGF, Args[1], T, IntType);
    102 
    103   llvm::Value *Result =
    104       CGF.Builder.CreateAtomicRMW(Kind, Args[0], Args[1],
    105                                   llvm::SequentiallyConsistent);
    106   Result = EmitFromInt(CGF, Result, T, ValueType);
    107   return RValue::get(Result);
    108 }
    109 
    110 /// Utility to insert an atomic instruction based Instrinsic::ID and
    111 /// the expression node, where the return value is the result of the
    112 /// operation.
    113 static RValue EmitBinaryAtomicPost(CodeGenFunction &CGF,
    114                                    llvm::AtomicRMWInst::BinOp Kind,
    115                                    const CallExpr *E,
    116                                    Instruction::BinaryOps Op) {
    117   QualType T = E->getType();
    118   assert(E->getArg(0)->getType()->isPointerType());
    119   assert(CGF.getContext().hasSameUnqualifiedType(T,
    120                                   E->getArg(0)->getType()->getPointeeType()));
    121   assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));
    122 
    123   llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
    124   unsigned AddrSpace =
    125     cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
    126 
    127   llvm::IntegerType *IntType =
    128     llvm::IntegerType::get(CGF.getLLVMContext(),
    129                            CGF.getContext().getTypeSize(T));
    130   llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
    131 
    132   llvm::Value *Args[2];
    133   Args[1] = CGF.EmitScalarExpr(E->getArg(1));
    134   llvm::Type *ValueType = Args[1]->getType();
    135   Args[1] = EmitToInt(CGF, Args[1], T, IntType);
    136   Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
    137 
    138   llvm::Value *Result =
    139       CGF.Builder.CreateAtomicRMW(Kind, Args[0], Args[1],
    140                                   llvm::SequentiallyConsistent);
    141   Result = CGF.Builder.CreateBinOp(Op, Result, Args[1]);
    142   Result = EmitFromInt(CGF, Result, T, ValueType);
    143   return RValue::get(Result);
    144 }
    145 
    146 /// EmitFAbs - Emit a call to fabs/fabsf/fabsl, depending on the type of ValTy,
    147 /// which must be a scalar floating point type.
    148 static Value *EmitFAbs(CodeGenFunction &CGF, Value *V, QualType ValTy) {
    149   const BuiltinType *ValTyP = ValTy->getAs<BuiltinType>();
    150   assert(ValTyP && "isn't scalar fp type!");
    151 
    152   StringRef FnName;
    153   switch (ValTyP->getKind()) {
    154   default: llvm_unreachable("Isn't a scalar fp type!");
    155   case BuiltinType::Float:      FnName = "fabsf"; break;
    156   case BuiltinType::Double:     FnName = "fabs"; break;
    157   case BuiltinType::LongDouble: FnName = "fabsl"; break;
    158   }
    159 
    160   // The prototype is something that takes and returns whatever V's type is.
    161   llvm::FunctionType *FT = llvm::FunctionType::get(V->getType(), V->getType(),
    162                                                    false);
    163   llvm::Value *Fn = CGF.CGM.CreateRuntimeFunction(FT, FnName);
    164 
    165   return CGF.Builder.CreateCall(Fn, V, "abs");
    166 }
    167 
    168 static RValue emitLibraryCall(CodeGenFunction &CGF, const FunctionDecl *Fn,
    169                               const CallExpr *E, llvm::Value *calleeValue) {
    170   return CGF.EmitCall(E->getCallee()->getType(), calleeValue,
    171                       ReturnValueSlot(), E->arg_begin(), E->arg_end(), Fn);
    172 }
    173 
    174 RValue CodeGenFunction::EmitBuiltinExpr(const FunctionDecl *FD,
    175                                         unsigned BuiltinID, const CallExpr *E) {
    176   // See if we can constant fold this builtin.  If so, don't emit it at all.
    177   Expr::EvalResult Result;
    178   if (E->EvaluateAsRValue(Result, CGM.getContext()) &&
    179       !Result.hasSideEffects()) {
    180     if (Result.Val.isInt())
    181       return RValue::get(llvm::ConstantInt::get(getLLVMContext(),
    182                                                 Result.Val.getInt()));
    183     if (Result.Val.isFloat())
    184       return RValue::get(llvm::ConstantFP::get(getLLVMContext(),
    185                                                Result.Val.getFloat()));
    186   }
    187 
    188   switch (BuiltinID) {
    189   default: break;  // Handle intrinsics and libm functions below.
    190   case Builtin::BI__builtin___CFStringMakeConstantString:
    191   case Builtin::BI__builtin___NSStringMakeConstantString:
    192     return RValue::get(CGM.EmitConstantExpr(E, E->getType(), 0));
    193   case Builtin::BI__builtin_stdarg_start:
    194   case Builtin::BI__builtin_va_start:
    195   case Builtin::BI__builtin_va_end: {
    196     Value *ArgValue = EmitVAListRef(E->getArg(0));
    197     llvm::Type *DestType = Int8PtrTy;
    198     if (ArgValue->getType() != DestType)
    199       ArgValue = Builder.CreateBitCast(ArgValue, DestType,
    200                                        ArgValue->getName().data());
    201 
    202     Intrinsic::ID inst = (BuiltinID == Builtin::BI__builtin_va_end) ?
    203       Intrinsic::vaend : Intrinsic::vastart;
    204     return RValue::get(Builder.CreateCall(CGM.getIntrinsic(inst), ArgValue));
    205   }
    206   case Builtin::BI__builtin_va_copy: {
    207     Value *DstPtr = EmitVAListRef(E->getArg(0));
    208     Value *SrcPtr = EmitVAListRef(E->getArg(1));
    209 
    210     llvm::Type *Type = Int8PtrTy;
    211 
    212     DstPtr = Builder.CreateBitCast(DstPtr, Type);
    213     SrcPtr = Builder.CreateBitCast(SrcPtr, Type);
    214     return RValue::get(Builder.CreateCall2(CGM.getIntrinsic(Intrinsic::vacopy),
    215                                            DstPtr, SrcPtr));
    216   }
    217   case Builtin::BI__builtin_abs:
    218   case Builtin::BI__builtin_labs:
    219   case Builtin::BI__builtin_llabs: {
    220     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    221 
    222     Value *NegOp = Builder.CreateNeg(ArgValue, "neg");
    223     Value *CmpResult =
    224     Builder.CreateICmpSGE(ArgValue,
    225                           llvm::Constant::getNullValue(ArgValue->getType()),
    226                                                             "abscond");
    227     Value *Result =
    228       Builder.CreateSelect(CmpResult, ArgValue, NegOp, "abs");
    229 
    230     return RValue::get(Result);
    231   }
    232   case Builtin::BI__builtin_ctzs:
    233   case Builtin::BI__builtin_ctz:
    234   case Builtin::BI__builtin_ctzl:
    235   case Builtin::BI__builtin_ctzll: {
    236     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    237 
    238     llvm::Type *ArgType = ArgValue->getType();
    239     Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
    240 
    241     llvm::Type *ResultType = ConvertType(E->getType());
    242     Value *ZeroUndef = Builder.getInt1(Target.isCLZForZeroUndef());
    243     Value *Result = Builder.CreateCall2(F, ArgValue, ZeroUndef);
    244     if (Result->getType() != ResultType)
    245       Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
    246                                      "cast");
    247     return RValue::get(Result);
    248   }
    249   case Builtin::BI__builtin_clzs:
    250   case Builtin::BI__builtin_clz:
    251   case Builtin::BI__builtin_clzl:
    252   case Builtin::BI__builtin_clzll: {
    253     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    254 
    255     llvm::Type *ArgType = ArgValue->getType();
    256     Value *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
    257 
    258     llvm::Type *ResultType = ConvertType(E->getType());
    259     Value *ZeroUndef = Builder.getInt1(Target.isCLZForZeroUndef());
    260     Value *Result = Builder.CreateCall2(F, ArgValue, ZeroUndef);
    261     if (Result->getType() != ResultType)
    262       Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
    263                                      "cast");
    264     return RValue::get(Result);
    265   }
    266   case Builtin::BI__builtin_ffs:
    267   case Builtin::BI__builtin_ffsl:
    268   case Builtin::BI__builtin_ffsll: {
    269     // ffs(x) -> x ? cttz(x) + 1 : 0
    270     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    271 
    272     llvm::Type *ArgType = ArgValue->getType();
    273     Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
    274 
    275     llvm::Type *ResultType = ConvertType(E->getType());
    276     Value *Tmp = Builder.CreateAdd(Builder.CreateCall2(F, ArgValue,
    277                                                        Builder.getTrue()),
    278                                    llvm::ConstantInt::get(ArgType, 1));
    279     Value *Zero = llvm::Constant::getNullValue(ArgType);
    280     Value *IsZero = Builder.CreateICmpEQ(ArgValue, Zero, "iszero");
    281     Value *Result = Builder.CreateSelect(IsZero, Zero, Tmp, "ffs");
    282     if (Result->getType() != ResultType)
    283       Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
    284                                      "cast");
    285     return RValue::get(Result);
    286   }
    287   case Builtin::BI__builtin_parity:
    288   case Builtin::BI__builtin_parityl:
    289   case Builtin::BI__builtin_parityll: {
    290     // parity(x) -> ctpop(x) & 1
    291     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    292 
    293     llvm::Type *ArgType = ArgValue->getType();
    294     Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
    295 
    296     llvm::Type *ResultType = ConvertType(E->getType());
    297     Value *Tmp = Builder.CreateCall(F, ArgValue);
    298     Value *Result = Builder.CreateAnd(Tmp, llvm::ConstantInt::get(ArgType, 1));
    299     if (Result->getType() != ResultType)
    300       Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
    301                                      "cast");
    302     return RValue::get(Result);
    303   }
    304   case Builtin::BI__builtin_popcount:
    305   case Builtin::BI__builtin_popcountl:
    306   case Builtin::BI__builtin_popcountll: {
    307     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    308 
    309     llvm::Type *ArgType = ArgValue->getType();
    310     Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
    311 
    312     llvm::Type *ResultType = ConvertType(E->getType());
    313     Value *Result = Builder.CreateCall(F, ArgValue);
    314     if (Result->getType() != ResultType)
    315       Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
    316                                      "cast");
    317     return RValue::get(Result);
    318   }
    319   case Builtin::BI__builtin_expect: {
    320     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    321     llvm::Type *ArgType = ArgValue->getType();
    322 
    323     Value *FnExpect = CGM.getIntrinsic(Intrinsic::expect, ArgType);
    324     Value *ExpectedValue = EmitScalarExpr(E->getArg(1));
    325 
    326     Value *Result = Builder.CreateCall2(FnExpect, ArgValue, ExpectedValue,
    327                                         "expval");
    328     return RValue::get(Result);
    329   }
    330   case Builtin::BI__builtin_bswap32:
    331   case Builtin::BI__builtin_bswap64: {
    332     Value *ArgValue = EmitScalarExpr(E->getArg(0));
    333     llvm::Type *ArgType = ArgValue->getType();
    334     Value *F = CGM.getIntrinsic(Intrinsic::bswap, ArgType);
    335     return RValue::get(Builder.CreateCall(F, ArgValue));
    336   }
    337   case Builtin::BI__builtin_object_size: {
    338     // We pass this builtin onto the optimizer so that it can
    339     // figure out the object size in more complex cases.
    340     llvm::Type *ResType = ConvertType(E->getType());
    341 
    342     // LLVM only supports 0 and 2, make sure that we pass along that
    343     // as a boolean.
    344     Value *Ty = EmitScalarExpr(E->getArg(1));
    345     ConstantInt *CI = dyn_cast<ConstantInt>(Ty);
    346     assert(CI);
    347     uint64_t val = CI->getZExtValue();
    348     CI = ConstantInt::get(Builder.getInt1Ty(), (val & 0x2) >> 1);
    349 
    350     Value *F = CGM.getIntrinsic(Intrinsic::objectsize, ResType);
    351     return RValue::get(Builder.CreateCall2(F,
    352                                            EmitScalarExpr(E->getArg(0)),
    353                                            CI));
    354   }
    355   case Builtin::BI__builtin_prefetch: {
    356     Value *Locality, *RW, *Address = EmitScalarExpr(E->getArg(0));
    357     // FIXME: Technically these constants should of type 'int', yes?
    358     RW = (E->getNumArgs() > 1) ? EmitScalarExpr(E->getArg(1)) :
    359       llvm::ConstantInt::get(Int32Ty, 0);
    360     Locality = (E->getNumArgs() > 2) ? EmitScalarExpr(E->getArg(2)) :
    361       llvm::ConstantInt::get(Int32Ty, 3);
    362     Value *Data = llvm::ConstantInt::get(Int32Ty, 1);
    363     Value *F = CGM.getIntrinsic(Intrinsic::prefetch);
    364     return RValue::get(Builder.CreateCall4(F, Address, RW, Locality, Data));
    365   }
    366   case Builtin::BI__builtin_trap: {
    367     Value *F = CGM.getIntrinsic(Intrinsic::trap);
    368     return RValue::get(Builder.CreateCall(F));
    369   }
    370   case Builtin::BI__builtin_unreachable: {
    371     if (CatchUndefined)
    372       EmitBranch(getTrapBB());
    373     else
    374       Builder.CreateUnreachable();
    375 
    376     // We do need to preserve an insertion point.
    377     EmitBlock(createBasicBlock("unreachable.cont"));
    378 
    379     return RValue::get(0);
    380   }
    381 
    382   case Builtin::BI__builtin_powi:
    383   case Builtin::BI__builtin_powif:
    384   case Builtin::BI__builtin_powil: {
    385     Value *Base = EmitScalarExpr(E->getArg(0));
    386     Value *Exponent = EmitScalarExpr(E->getArg(1));
    387     llvm::Type *ArgType = Base->getType();
    388     Value *F = CGM.getIntrinsic(Intrinsic::powi, ArgType);
    389     return RValue::get(Builder.CreateCall2(F, Base, Exponent));
    390   }
    391 
    392   case Builtin::BI__builtin_isgreater:
    393   case Builtin::BI__builtin_isgreaterequal:
    394   case Builtin::BI__builtin_isless:
    395   case Builtin::BI__builtin_islessequal:
    396   case Builtin::BI__builtin_islessgreater:
    397   case Builtin::BI__builtin_isunordered: {
    398     // Ordered comparisons: we know the arguments to these are matching scalar
    399     // floating point values.
    400     Value *LHS = EmitScalarExpr(E->getArg(0));
    401     Value *RHS = EmitScalarExpr(E->getArg(1));
    402 
    403     switch (BuiltinID) {
    404     default: llvm_unreachable("Unknown ordered comparison");
    405     case Builtin::BI__builtin_isgreater:
    406       LHS = Builder.CreateFCmpOGT(LHS, RHS, "cmp");
    407       break;
    408     case Builtin::BI__builtin_isgreaterequal:
    409       LHS = Builder.CreateFCmpOGE(LHS, RHS, "cmp");
    410       break;
    411     case Builtin::BI__builtin_isless:
    412       LHS = Builder.CreateFCmpOLT(LHS, RHS, "cmp");
    413       break;
    414     case Builtin::BI__builtin_islessequal:
    415       LHS = Builder.CreateFCmpOLE(LHS, RHS, "cmp");
    416       break;
    417     case Builtin::BI__builtin_islessgreater:
    418       LHS = Builder.CreateFCmpONE(LHS, RHS, "cmp");
    419       break;
    420     case Builtin::BI__builtin_isunordered:
    421       LHS = Builder.CreateFCmpUNO(LHS, RHS, "cmp");
    422       break;
    423     }
    424     // ZExt bool to int type.
    425     return RValue::get(Builder.CreateZExt(LHS, ConvertType(E->getType())));
    426   }
    427   case Builtin::BI__builtin_isnan: {
    428     Value *V = EmitScalarExpr(E->getArg(0));
    429     V = Builder.CreateFCmpUNO(V, V, "cmp");
    430     return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
    431   }
    432 
    433   case Builtin::BI__builtin_isinf: {
    434     // isinf(x) --> fabs(x) == infinity
    435     Value *V = EmitScalarExpr(E->getArg(0));
    436     V = EmitFAbs(*this, V, E->getArg(0)->getType());
    437 
    438     V = Builder.CreateFCmpOEQ(V, ConstantFP::getInfinity(V->getType()),"isinf");
    439     return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
    440   }
    441 
    442   // TODO: BI__builtin_isinf_sign
    443   //   isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0
    444 
    445   case Builtin::BI__builtin_isnormal: {
    446     // isnormal(x) --> x == x && fabsf(x) < infinity && fabsf(x) >= float_min
    447     Value *V = EmitScalarExpr(E->getArg(0));
    448     Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");
    449 
    450     Value *Abs = EmitFAbs(*this, V, E->getArg(0)->getType());
    451     Value *IsLessThanInf =
    452       Builder.CreateFCmpULT(Abs, ConstantFP::getInfinity(V->getType()),"isinf");
    453     APFloat Smallest = APFloat::getSmallestNormalized(
    454                    getContext().getFloatTypeSemantics(E->getArg(0)->getType()));
    455     Value *IsNormal =
    456       Builder.CreateFCmpUGE(Abs, ConstantFP::get(V->getContext(), Smallest),
    457                             "isnormal");
    458     V = Builder.CreateAnd(Eq, IsLessThanInf, "and");
    459     V = Builder.CreateAnd(V, IsNormal, "and");
    460     return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
    461   }
    462 
    463   case Builtin::BI__builtin_isfinite: {
    464     // isfinite(x) --> x == x && fabs(x) != infinity;
    465     Value *V = EmitScalarExpr(E->getArg(0));
    466     Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");
    467 
    468     Value *Abs = EmitFAbs(*this, V, E->getArg(0)->getType());
    469     Value *IsNotInf =
    470       Builder.CreateFCmpUNE(Abs, ConstantFP::getInfinity(V->getType()),"isinf");
    471 
    472     V = Builder.CreateAnd(Eq, IsNotInf, "and");
    473     return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
    474   }
    475 
    476   case Builtin::BI__builtin_fpclassify: {
    477     Value *V = EmitScalarExpr(E->getArg(5));
    478     llvm::Type *Ty = ConvertType(E->getArg(5)->getType());
    479 
    480     // Create Result
    481     BasicBlock *Begin = Builder.GetInsertBlock();
    482     BasicBlock *End = createBasicBlock("fpclassify_end", this->CurFn);
    483     Builder.SetInsertPoint(End);
    484     PHINode *Result =
    485       Builder.CreatePHI(ConvertType(E->getArg(0)->getType()), 4,
    486                         "fpclassify_result");
    487 
    488     // if (V==0) return FP_ZERO
    489     Builder.SetInsertPoint(Begin);
    490     Value *IsZero = Builder.CreateFCmpOEQ(V, Constant::getNullValue(Ty),
    491                                           "iszero");
    492     Value *ZeroLiteral = EmitScalarExpr(E->getArg(4));
    493     BasicBlock *NotZero = createBasicBlock("fpclassify_not_zero", this->CurFn);
    494     Builder.CreateCondBr(IsZero, End, NotZero);
    495     Result->addIncoming(ZeroLiteral, Begin);
    496 
    497     // if (V != V) return FP_NAN
    498     Builder.SetInsertPoint(NotZero);
    499     Value *IsNan = Builder.CreateFCmpUNO(V, V, "cmp");
    500     Value *NanLiteral = EmitScalarExpr(E->getArg(0));
    501     BasicBlock *NotNan = createBasicBlock("fpclassify_not_nan", this->CurFn);
    502     Builder.CreateCondBr(IsNan, End, NotNan);
    503     Result->addIncoming(NanLiteral, NotZero);
    504 
    505     // if (fabs(V) == infinity) return FP_INFINITY
    506     Builder.SetInsertPoint(NotNan);
    507     Value *VAbs = EmitFAbs(*this, V, E->getArg(5)->getType());
    508     Value *IsInf =
    509       Builder.CreateFCmpOEQ(VAbs, ConstantFP::getInfinity(V->getType()),
    510                             "isinf");
    511     Value *InfLiteral = EmitScalarExpr(E->getArg(1));
    512     BasicBlock *NotInf = createBasicBlock("fpclassify_not_inf", this->CurFn);
    513     Builder.CreateCondBr(IsInf, End, NotInf);
    514     Result->addIncoming(InfLiteral, NotNan);
    515 
    516     // if (fabs(V) >= MIN_NORMAL) return FP_NORMAL else FP_SUBNORMAL
    517     Builder.SetInsertPoint(NotInf);
    518     APFloat Smallest = APFloat::getSmallestNormalized(
    519         getContext().getFloatTypeSemantics(E->getArg(5)->getType()));
    520     Value *IsNormal =
    521       Builder.CreateFCmpUGE(VAbs, ConstantFP::get(V->getContext(), Smallest),
    522                             "isnormal");
    523     Value *NormalResult =
    524       Builder.CreateSelect(IsNormal, EmitScalarExpr(E->getArg(2)),
    525                            EmitScalarExpr(E->getArg(3)));
    526     Builder.CreateBr(End);
    527     Result->addIncoming(NormalResult, NotInf);
    528 
    529     // return Result
    530     Builder.SetInsertPoint(End);
    531     return RValue::get(Result);
    532   }
    533 
    534   case Builtin::BIalloca:
    535   case Builtin::BI__builtin_alloca: {
    536     Value *Size = EmitScalarExpr(E->getArg(0));
    537     return RValue::get(Builder.CreateAlloca(Builder.getInt8Ty(), Size));
    538   }
    539   case Builtin::BIbzero:
    540   case Builtin::BI__builtin_bzero: {
    541     Value *Address = EmitScalarExpr(E->getArg(0));
    542     Value *SizeVal = EmitScalarExpr(E->getArg(1));
    543     unsigned Align = GetPointeeAlignment(E->getArg(0));
    544     Builder.CreateMemSet(Address, Builder.getInt8(0), SizeVal, Align, false);
    545     return RValue::get(Address);
    546   }
    547   case Builtin::BImemcpy:
    548   case Builtin::BI__builtin_memcpy: {
    549     Value *Address = EmitScalarExpr(E->getArg(0));
    550     Value *SrcAddr = EmitScalarExpr(E->getArg(1));
    551     Value *SizeVal = EmitScalarExpr(E->getArg(2));
    552     unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
    553                               GetPointeeAlignment(E->getArg(1)));
    554     Builder.CreateMemCpy(Address, SrcAddr, SizeVal, Align, false);
    555     return RValue::get(Address);
    556   }
    557 
    558   case Builtin::BI__builtin___memcpy_chk: {
    559     // fold __builtin_memcpy_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
    560     llvm::APSInt Size, DstSize;
    561     if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
    562         !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
    563       break;
    564     if (Size.ugt(DstSize))
    565       break;
    566     Value *Dest = EmitScalarExpr(E->getArg(0));
    567     Value *Src = EmitScalarExpr(E->getArg(1));
    568     Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
    569     unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
    570                               GetPointeeAlignment(E->getArg(1)));
    571     Builder.CreateMemCpy(Dest, Src, SizeVal, Align, false);
    572     return RValue::get(Dest);
    573   }
    574 
    575   case Builtin::BI__builtin_objc_memmove_collectable: {
    576     Value *Address = EmitScalarExpr(E->getArg(0));
    577     Value *SrcAddr = EmitScalarExpr(E->getArg(1));
    578     Value *SizeVal = EmitScalarExpr(E->getArg(2));
    579     CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this,
    580                                                   Address, SrcAddr, SizeVal);
    581     return RValue::get(Address);
    582   }
    583 
    584   case Builtin::BI__builtin___memmove_chk: {
    585     // fold __builtin_memmove_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
    586     llvm::APSInt Size, DstSize;
    587     if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
    588         !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
    589       break;
    590     if (Size.ugt(DstSize))
    591       break;
    592     Value *Dest = EmitScalarExpr(E->getArg(0));
    593     Value *Src = EmitScalarExpr(E->getArg(1));
    594     Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
    595     unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
    596                               GetPointeeAlignment(E->getArg(1)));
    597     Builder.CreateMemMove(Dest, Src, SizeVal, Align, false);
    598     return RValue::get(Dest);
    599   }
    600 
    601   case Builtin::BImemmove:
    602   case Builtin::BI__builtin_memmove: {
    603     Value *Address = EmitScalarExpr(E->getArg(0));
    604     Value *SrcAddr = EmitScalarExpr(E->getArg(1));
    605     Value *SizeVal = EmitScalarExpr(E->getArg(2));
    606     unsigned Align = std::min(GetPointeeAlignment(E->getArg(0)),
    607                               GetPointeeAlignment(E->getArg(1)));
    608     Builder.CreateMemMove(Address, SrcAddr, SizeVal, Align, false);
    609     return RValue::get(Address);
    610   }
    611   case Builtin::BImemset:
    612   case Builtin::BI__builtin_memset: {
    613     Value *Address = EmitScalarExpr(E->getArg(0));
    614     Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
    615                                          Builder.getInt8Ty());
    616     Value *SizeVal = EmitScalarExpr(E->getArg(2));
    617     unsigned Align = GetPointeeAlignment(E->getArg(0));
    618     Builder.CreateMemSet(Address, ByteVal, SizeVal, Align, false);
    619     return RValue::get(Address);
    620   }
    621   case Builtin::BI__builtin___memset_chk: {
    622     // fold __builtin_memset_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
    623     llvm::APSInt Size, DstSize;
    624     if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
    625         !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
    626       break;
    627     if (Size.ugt(DstSize))
    628       break;
    629     Value *Address = EmitScalarExpr(E->getArg(0));
    630     Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
    631                                          Builder.getInt8Ty());
    632     Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
    633     unsigned Align = GetPointeeAlignment(E->getArg(0));
    634     Builder.CreateMemSet(Address, ByteVal, SizeVal, Align, false);
    635 
    636     return RValue::get(Address);
    637   }
    638   case Builtin::BI__builtin_dwarf_cfa: {
    639     // The offset in bytes from the first argument to the CFA.
    640     //
    641     // Why on earth is this in the frontend?  Is there any reason at
    642     // all that the backend can't reasonably determine this while
    643     // lowering llvm.eh.dwarf.cfa()?
    644     //
    645     // TODO: If there's a satisfactory reason, add a target hook for
    646     // this instead of hard-coding 0, which is correct for most targets.
    647     int32_t Offset = 0;
    648 
    649     Value *F = CGM.getIntrinsic(Intrinsic::eh_dwarf_cfa);
    650     return RValue::get(Builder.CreateCall(F,
    651                                       llvm::ConstantInt::get(Int32Ty, Offset)));
    652   }
  case Builtin::BI__builtin_return_address: {
    // Lower directly to llvm.returnaddress; the depth argument is cast
    // (zero-extended) to the i32 the intrinsic expects.
    Value *Depth = EmitScalarExpr(E->getArg(0));
    Depth = Builder.CreateIntCast(Depth, Int32Ty, false);
    Value *F = CGM.getIntrinsic(Intrinsic::returnaddress);
    return RValue::get(Builder.CreateCall(F, Depth));
  }
  case Builtin::BI__builtin_frame_address: {
    // Same lowering as __builtin_return_address, but via llvm.frameaddress.
    Value *Depth = EmitScalarExpr(E->getArg(0));
    Depth = Builder.CreateIntCast(Depth, Int32Ty, false);
    Value *F = CGM.getIntrinsic(Intrinsic::frameaddress);
    return RValue::get(Builder.CreateCall(F, Depth));
  }
    665   case Builtin::BI__builtin_extract_return_addr: {
    666     Value *Address = EmitScalarExpr(E->getArg(0));
    667     Value *Result = getTargetHooks().decodeReturnAddress(*this, Address);
    668     return RValue::get(Result);
    669   }
    670   case Builtin::BI__builtin_frob_return_addr: {
    671     Value *Address = EmitScalarExpr(E->getArg(0));
    672     Value *Result = getTargetHooks().encodeReturnAddress(*this, Address);
    673     return RValue::get(Result);
    674   }
    675   case Builtin::BI__builtin_dwarf_sp_column: {
    676     llvm::IntegerType *Ty
    677       = cast<llvm::IntegerType>(ConvertType(E->getType()));
    678     int Column = getTargetHooks().getDwarfEHStackPointer(CGM);
    679     if (Column == -1) {
    680       CGM.ErrorUnsupported(E, "__builtin_dwarf_sp_column");
    681       return RValue::get(llvm::UndefValue::get(Ty));
    682     }
    683     return RValue::get(llvm::ConstantInt::get(Ty, Column, true));
    684   }
    685   case Builtin::BI__builtin_init_dwarf_reg_size_table: {
    686     Value *Address = EmitScalarExpr(E->getArg(0));
    687     if (getTargetHooks().initDwarfEHRegSizeTable(*this, Address))
    688       CGM.ErrorUnsupported(E, "__builtin_init_dwarf_reg_size_table");
    689     return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
    690   }
  case Builtin::BI__builtin_eh_return: {
    Value *Int = EmitScalarExpr(E->getArg(0));
    Value *Ptr = EmitScalarExpr(E->getArg(1));

    // Select the 32- or 64-bit eh.return intrinsic based on the width of
    // the offset argument's type.
    llvm::IntegerType *IntTy = cast<llvm::IntegerType>(Int->getType());
    assert((IntTy->getBitWidth() == 32 || IntTy->getBitWidth() == 64) &&
           "LLVM's __builtin_eh_return only supports 32- and 64-bit variants");
    Value *F = CGM.getIntrinsic(IntTy->getBitWidth() == 32
                                  ? Intrinsic::eh_return_i32
                                  : Intrinsic::eh_return_i64);
    Builder.CreateCall2(F, Int, Ptr);
    // Control never returns here; terminate the block.
    Builder.CreateUnreachable();

    // We do need to preserve an insertion point.
    EmitBlock(createBasicBlock("builtin_eh_return.cont"));

    return RValue::get(0);
  }
    709   case Builtin::BI__builtin_unwind_init: {
    710     Value *F = CGM.getIntrinsic(Intrinsic::eh_unwind_init);
    711     return RValue::get(Builder.CreateCall(F));
    712   }
    713   case Builtin::BI__builtin_extend_pointer: {
    714     // Extends a pointer to the size of an _Unwind_Word, which is
    715     // uint64_t on all platforms.  Generally this gets poked into a
    716     // register and eventually used as an address, so if the
    717     // addressing registers are wider than pointers and the platform
    718     // doesn't implicitly ignore high-order bits when doing
    719     // addressing, we need to make sure we zext / sext based on
    720     // the platform's expectations.
    721     //
    722     // See: http://gcc.gnu.org/ml/gcc-bugs/2002-02/msg00237.html
    723 
    724     // Cast the pointer to intptr_t.
    725     Value *Ptr = EmitScalarExpr(E->getArg(0));
    726     Value *Result = Builder.CreatePtrToInt(Ptr, IntPtrTy, "extend.cast");
    727 
    728     // If that's 64 bits, we're done.
    729     if (IntPtrTy->getBitWidth() == 64)
    730       return RValue::get(Result);
    731 
    732     // Otherwise, ask the codegen data what to do.
    733     if (getTargetHooks().extendPointerWithSExt())
    734       return RValue::get(Builder.CreateSExt(Result, Int64Ty, "extend.sext"));
    735     else
    736       return RValue::get(Builder.CreateZExt(Result, Int64Ty, "extend.zext"));
    737   }
  case Builtin::BI__builtin_setjmp: {
    // Buffer is a void**.
    Value *Buf = EmitScalarExpr(E->getArg(0));

    // Store the frame pointer into slot 0 of the setjmp buffer.
    Value *FrameAddr =
      Builder.CreateCall(CGM.getIntrinsic(Intrinsic::frameaddress),
                         ConstantInt::get(Int32Ty, 0));
    Builder.CreateStore(FrameAddr, Buf);

    // Store the stack pointer into slot 2 of the setjmp buffer.
    // (Slot 1 is not written here; presumably reserved for the
    // eh.sjlj intrinsics' own use -- confirm against the backend.)
    Value *StackAddr =
      Builder.CreateCall(CGM.getIntrinsic(Intrinsic::stacksave));
    Value *StackSaveSlot =
      Builder.CreateGEP(Buf, ConstantInt::get(Int32Ty, 2));
    Builder.CreateStore(StackAddr, StackSaveSlot);

    // Call LLVM's EH setjmp, which is lightweight.
    Value *F = CGM.getIntrinsic(Intrinsic::eh_sjlj_setjmp);
    Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
    return RValue::get(Builder.CreateCall(F, Buf));
  }
    760   case Builtin::BI__builtin_longjmp: {
    761     Value *Buf = EmitScalarExpr(E->getArg(0));
    762     Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
    763 
    764     // Call LLVM's EH longjmp, which is lightweight.
    765     Builder.CreateCall(CGM.getIntrinsic(Intrinsic::eh_sjlj_longjmp), Buf);
    766 
    767     // longjmp doesn't return; mark this as unreachable.
    768     Builder.CreateUnreachable();
    769 
    770     // We do need to preserve an insertion point.
    771     EmitBlock(createBasicBlock("longjmp.cont"));
    772 
    773     return RValue::get(0);
    774   }
    775   case Builtin::BI__sync_fetch_and_add:
    776   case Builtin::BI__sync_fetch_and_sub:
    777   case Builtin::BI__sync_fetch_and_or:
    778   case Builtin::BI__sync_fetch_and_and:
    779   case Builtin::BI__sync_fetch_and_xor:
    780   case Builtin::BI__sync_add_and_fetch:
    781   case Builtin::BI__sync_sub_and_fetch:
    782   case Builtin::BI__sync_and_and_fetch:
    783   case Builtin::BI__sync_or_and_fetch:
    784   case Builtin::BI__sync_xor_and_fetch:
    785   case Builtin::BI__sync_val_compare_and_swap:
    786   case Builtin::BI__sync_bool_compare_and_swap:
    787   case Builtin::BI__sync_lock_test_and_set:
    788   case Builtin::BI__sync_lock_release:
    789   case Builtin::BI__sync_swap:
    790     llvm_unreachable("Shouldn't make it through sema");
    791   case Builtin::BI__sync_fetch_and_add_1:
    792   case Builtin::BI__sync_fetch_and_add_2:
    793   case Builtin::BI__sync_fetch_and_add_4:
    794   case Builtin::BI__sync_fetch_and_add_8:
    795   case Builtin::BI__sync_fetch_and_add_16:
    796     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Add, E);
    797   case Builtin::BI__sync_fetch_and_sub_1:
    798   case Builtin::BI__sync_fetch_and_sub_2:
    799   case Builtin::BI__sync_fetch_and_sub_4:
    800   case Builtin::BI__sync_fetch_and_sub_8:
    801   case Builtin::BI__sync_fetch_and_sub_16:
    802     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Sub, E);
    803   case Builtin::BI__sync_fetch_and_or_1:
    804   case Builtin::BI__sync_fetch_and_or_2:
    805   case Builtin::BI__sync_fetch_and_or_4:
    806   case Builtin::BI__sync_fetch_and_or_8:
    807   case Builtin::BI__sync_fetch_and_or_16:
    808     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Or, E);
    809   case Builtin::BI__sync_fetch_and_and_1:
    810   case Builtin::BI__sync_fetch_and_and_2:
    811   case Builtin::BI__sync_fetch_and_and_4:
    812   case Builtin::BI__sync_fetch_and_and_8:
    813   case Builtin::BI__sync_fetch_and_and_16:
    814     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::And, E);
    815   case Builtin::BI__sync_fetch_and_xor_1:
    816   case Builtin::BI__sync_fetch_and_xor_2:
    817   case Builtin::BI__sync_fetch_and_xor_4:
    818   case Builtin::BI__sync_fetch_and_xor_8:
    819   case Builtin::BI__sync_fetch_and_xor_16:
    820     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xor, E);
    821 
    822   // Clang extensions: not overloaded yet.
    823   case Builtin::BI__sync_fetch_and_min:
    824     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Min, E);
    825   case Builtin::BI__sync_fetch_and_max:
    826     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Max, E);
    827   case Builtin::BI__sync_fetch_and_umin:
    828     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMin, E);
    829   case Builtin::BI__sync_fetch_and_umax:
    830     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMax, E);
    831 
    832   case Builtin::BI__sync_add_and_fetch_1:
    833   case Builtin::BI__sync_add_and_fetch_2:
    834   case Builtin::BI__sync_add_and_fetch_4:
    835   case Builtin::BI__sync_add_and_fetch_8:
    836   case Builtin::BI__sync_add_and_fetch_16:
    837     return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Add, E,
    838                                 llvm::Instruction::Add);
    839   case Builtin::BI__sync_sub_and_fetch_1:
    840   case Builtin::BI__sync_sub_and_fetch_2:
    841   case Builtin::BI__sync_sub_and_fetch_4:
    842   case Builtin::BI__sync_sub_and_fetch_8:
    843   case Builtin::BI__sync_sub_and_fetch_16:
    844     return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Sub, E,
    845                                 llvm::Instruction::Sub);
    846   case Builtin::BI__sync_and_and_fetch_1:
    847   case Builtin::BI__sync_and_and_fetch_2:
    848   case Builtin::BI__sync_and_and_fetch_4:
    849   case Builtin::BI__sync_and_and_fetch_8:
    850   case Builtin::BI__sync_and_and_fetch_16:
    851     return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::And, E,
    852                                 llvm::Instruction::And);
    853   case Builtin::BI__sync_or_and_fetch_1:
    854   case Builtin::BI__sync_or_and_fetch_2:
    855   case Builtin::BI__sync_or_and_fetch_4:
    856   case Builtin::BI__sync_or_and_fetch_8:
    857   case Builtin::BI__sync_or_and_fetch_16:
    858     return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Or, E,
    859                                 llvm::Instruction::Or);
    860   case Builtin::BI__sync_xor_and_fetch_1:
    861   case Builtin::BI__sync_xor_and_fetch_2:
    862   case Builtin::BI__sync_xor_and_fetch_4:
    863   case Builtin::BI__sync_xor_and_fetch_8:
    864   case Builtin::BI__sync_xor_and_fetch_16:
    865     return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Xor, E,
    866                                 llvm::Instruction::Xor);
    867 
    868   case Builtin::BI__sync_val_compare_and_swap_1:
    869   case Builtin::BI__sync_val_compare_and_swap_2:
    870   case Builtin::BI__sync_val_compare_and_swap_4:
    871   case Builtin::BI__sync_val_compare_and_swap_8:
    872   case Builtin::BI__sync_val_compare_and_swap_16: {
    873     QualType T = E->getType();
    874     llvm::Value *DestPtr = EmitScalarExpr(E->getArg(0));
    875     unsigned AddrSpace =
    876       cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
    877 
    878     llvm::IntegerType *IntType =
    879       llvm::IntegerType::get(getLLVMContext(),
    880                              getContext().getTypeSize(T));
    881     llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
    882 
    883     Value *Args[3];
    884     Args[0] = Builder.CreateBitCast(DestPtr, IntPtrType);
    885     Args[1] = EmitScalarExpr(E->getArg(1));
    886     llvm::Type *ValueType = Args[1]->getType();
    887     Args[1] = EmitToInt(*this, Args[1], T, IntType);
    888     Args[2] = EmitToInt(*this, EmitScalarExpr(E->getArg(2)), T, IntType);
    889 
    890     Value *Result = Builder.CreateAtomicCmpXchg(Args[0], Args[1], Args[2],
    891                                                 llvm::SequentiallyConsistent);
    892     Result = EmitFromInt(*this, Result, T, ValueType);
    893     return RValue::get(Result);
    894   }
    895 
    896   case Builtin::BI__sync_bool_compare_and_swap_1:
    897   case Builtin::BI__sync_bool_compare_and_swap_2:
    898   case Builtin::BI__sync_bool_compare_and_swap_4:
    899   case Builtin::BI__sync_bool_compare_and_swap_8:
    900   case Builtin::BI__sync_bool_compare_and_swap_16: {
    901     QualType T = E->getArg(1)->getType();
    902     llvm::Value *DestPtr = EmitScalarExpr(E->getArg(0));
    903     unsigned AddrSpace =
    904       cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
    905 
    906     llvm::IntegerType *IntType =
    907       llvm::IntegerType::get(getLLVMContext(),
    908                              getContext().getTypeSize(T));
    909     llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
    910 
    911     Value *Args[3];
    912     Args[0] = Builder.CreateBitCast(DestPtr, IntPtrType);
    913     Args[1] = EmitToInt(*this, EmitScalarExpr(E->getArg(1)), T, IntType);
    914     Args[2] = EmitToInt(*this, EmitScalarExpr(E->getArg(2)), T, IntType);
    915 
    916     Value *OldVal = Args[1];
    917     Value *PrevVal = Builder.CreateAtomicCmpXchg(Args[0], Args[1], Args[2],
    918                                                  llvm::SequentiallyConsistent);
    919     Value *Result = Builder.CreateICmpEQ(PrevVal, OldVal);
    920     // zext bool to int.
    921     Result = Builder.CreateZExt(Result, ConvertType(E->getType()));
    922     return RValue::get(Result);
    923   }
    924 
    925   case Builtin::BI__sync_swap_1:
    926   case Builtin::BI__sync_swap_2:
    927   case Builtin::BI__sync_swap_4:
    928   case Builtin::BI__sync_swap_8:
    929   case Builtin::BI__sync_swap_16:
    930     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
    931 
    932   case Builtin::BI__sync_lock_test_and_set_1:
    933   case Builtin::BI__sync_lock_test_and_set_2:
    934   case Builtin::BI__sync_lock_test_and_set_4:
    935   case Builtin::BI__sync_lock_test_and_set_8:
    936   case Builtin::BI__sync_lock_test_and_set_16:
    937     return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
    938 
  case Builtin::BI__sync_lock_release_1:
  case Builtin::BI__sync_lock_release_2:
  case Builtin::BI__sync_lock_release_4:
  case Builtin::BI__sync_lock_release_8:
  case Builtin::BI__sync_lock_release_16: {
    // __sync_lock_release is lowered to an atomic store of zero with
    // Release ordering.
    Value *Ptr = EmitScalarExpr(E->getArg(0));
    QualType ElTy = E->getArg(0)->getType()->getPointeeType();
    CharUnits StoreSize = getContext().getTypeSizeInChars(ElTy);
    // Store through an integer type of the pointee's exact bit width.
    llvm::Type *ITy = llvm::IntegerType::get(getLLVMContext(),
                                             StoreSize.getQuantity() * 8);
    Ptr = Builder.CreateBitCast(Ptr, ITy->getPointerTo());
    llvm::StoreInst *Store =
      Builder.CreateStore(llvm::Constant::getNullValue(ITy), Ptr);
    Store->setAlignment(StoreSize.getQuantity());
    Store->setAtomic(llvm::Release);
    // The builtin returns void.
    return RValue::get(0);
  }
    956 
    957   case Builtin::BI__sync_synchronize: {
    958     // We assume this is supposed to correspond to a C++0x-style
    959     // sequentially-consistent fence (i.e. this is only usable for
    960     // synchonization, not device I/O or anything like that). This intrinsic
    961     // is really badly designed in the sense that in theory, there isn't
    962     // any way to safely use it... but in practice, it mostly works
    963     // to use it with non-atomic loads and stores to get acquire/release
    964     // semantics.
    965     Builder.CreateFence(llvm::SequentiallyConsistent);
    966     return RValue::get(0);
    967   }
    968 
    969   case Builtin::BI__c11_atomic_is_lock_free:
    970   case Builtin::BI__atomic_is_lock_free: {
    971     // Call "bool __atomic_is_lock_free(size_t size, void *ptr)". For the
    972     // __c11 builtin, ptr is 0 (indicating a properly-aligned object), since
    973     // _Atomic(T) is always properly-aligned.
    974     const char *LibCallName = "__atomic_is_lock_free";
    975     CallArgList Args;
    976     Args.add(RValue::get(EmitScalarExpr(E->getArg(0))),
    977              getContext().getSizeType());
    978     if (BuiltinID == Builtin::BI__atomic_is_lock_free)
    979       Args.add(RValue::get(EmitScalarExpr(E->getArg(1))),
    980                getContext().VoidPtrTy);
    981     else
    982       Args.add(RValue::get(llvm::Constant::getNullValue(VoidPtrTy)),
    983                getContext().VoidPtrTy);
    984     const CGFunctionInfo &FuncInfo =
    985         CGM.getTypes().arrangeFunctionCall(E->getType(), Args,
    986                                            FunctionType::ExtInfo(),
    987                                            RequiredArgs::All);
    988     llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FuncInfo);
    989     llvm::Constant *Func = CGM.CreateRuntimeFunction(FTy, LibCallName);
    990     return EmitCall(FuncInfo, Func, ReturnValueSlot(), Args);
    991   }
    992 
    993   case Builtin::BI__atomic_test_and_set: {
    994     // Look at the argument type to determine whether this is a volatile
    995     // operation. The parameter type is always volatile.
    996     QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
    997     bool Volatile =
    998         PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();
    999 
   1000     Value *Ptr = EmitScalarExpr(E->getArg(0));
   1001     unsigned AddrSpace =
   1002         cast<llvm::PointerType>(Ptr->getType())->getAddressSpace();
   1003     Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
   1004     Value *NewVal = Builder.getInt8(1);
   1005     Value *Order = EmitScalarExpr(E->getArg(1));
   1006     if (isa<llvm::ConstantInt>(Order)) {
   1007       int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
   1008       AtomicRMWInst *Result = 0;
   1009       switch (ord) {
   1010       case 0:  // memory_order_relaxed
   1011       default: // invalid order
   1012         Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
   1013                                          Ptr, NewVal,
   1014                                          llvm::Monotonic);
   1015         break;
   1016       case 1:  // memory_order_consume
   1017       case 2:  // memory_order_acquire
   1018         Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
   1019                                          Ptr, NewVal,
   1020                                          llvm::Acquire);
   1021         break;
   1022       case 3:  // memory_order_release
   1023         Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
   1024                                          Ptr, NewVal,
   1025                                          llvm::Release);
   1026         break;
   1027       case 4:  // memory_order_acq_rel
   1028         Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
   1029                                          Ptr, NewVal,
   1030                                          llvm::AcquireRelease);
   1031         break;
   1032       case 5:  // memory_order_seq_cst
   1033         Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
   1034                                          Ptr, NewVal,
   1035                                          llvm::SequentiallyConsistent);
   1036         break;
   1037       }
   1038       Result->setVolatile(Volatile);
   1039       return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
   1040     }
   1041 
   1042     llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
   1043 
   1044     llvm::BasicBlock *BBs[5] = {
   1045       createBasicBlock("monotonic", CurFn),
   1046       createBasicBlock("acquire", CurFn),
   1047       createBasicBlock("release", CurFn),
   1048       createBasicBlock("acqrel", CurFn),
   1049       createBasicBlock("seqcst", CurFn)
   1050     };
   1051     llvm::AtomicOrdering Orders[5] = {
   1052       llvm::Monotonic, llvm::Acquire, llvm::Release,
   1053       llvm::AcquireRelease, llvm::SequentiallyConsistent
   1054     };
   1055 
   1056     Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
   1057     llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
   1058 
   1059     Builder.SetInsertPoint(ContBB);
   1060     PHINode *Result = Builder.CreatePHI(Int8Ty, 5, "was_set");
   1061 
   1062     for (unsigned i = 0; i < 5; ++i) {
   1063       Builder.SetInsertPoint(BBs[i]);
   1064       AtomicRMWInst *RMW = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
   1065                                                    Ptr, NewVal, Orders[i]);
   1066       RMW->setVolatile(Volatile);
   1067       Result->addIncoming(RMW, BBs[i]);
   1068       Builder.CreateBr(ContBB);
   1069     }
   1070 
   1071     SI->addCase(Builder.getInt32(0), BBs[0]);
   1072     SI->addCase(Builder.getInt32(1), BBs[1]);
   1073     SI->addCase(Builder.getInt32(2), BBs[1]);
   1074     SI->addCase(Builder.getInt32(3), BBs[2]);
   1075     SI->addCase(Builder.getInt32(4), BBs[3]);
   1076     SI->addCase(Builder.getInt32(5), BBs[4]);
   1077 
   1078     Builder.SetInsertPoint(ContBB);
   1079     return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
   1080   }
   1081 
   1082   case Builtin::BI__atomic_clear: {
   1083     QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
   1084     bool Volatile =
   1085         PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();
   1086 
   1087     Value *Ptr = EmitScalarExpr(E->getArg(0));
   1088     unsigned AddrSpace =
   1089         cast<llvm::PointerType>(Ptr->getType())->getAddressSpace();
   1090     Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
   1091     Value *NewVal = Builder.getInt8(0);
   1092     Value *Order = EmitScalarExpr(E->getArg(1));
   1093     if (isa<llvm::ConstantInt>(Order)) {
   1094       int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
   1095       StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
   1096       Store->setAlignment(1);
   1097       switch (ord) {
   1098       case 0:  // memory_order_relaxed
   1099       default: // invalid order
   1100         Store->setOrdering(llvm::Monotonic);
   1101         break;
   1102       case 3:  // memory_order_release
   1103         Store->setOrdering(llvm::Release);
   1104         break;
   1105       case 5:  // memory_order_seq_cst
   1106         Store->setOrdering(llvm::SequentiallyConsistent);
   1107         break;
   1108       }
   1109       return RValue::get(0);
   1110     }
   1111 
   1112     llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
   1113 
   1114     llvm::BasicBlock *BBs[3] = {
   1115       createBasicBlock("monotonic", CurFn),
   1116       createBasicBlock("release", CurFn),
   1117       createBasicBlock("seqcst", CurFn)
   1118     };
   1119     llvm::AtomicOrdering Orders[3] = {
   1120       llvm::Monotonic, llvm::Release, llvm::SequentiallyConsistent
   1121     };
   1122 
   1123     Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
   1124     llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
   1125 
   1126     for (unsigned i = 0; i < 3; ++i) {
   1127       Builder.SetInsertPoint(BBs[i]);
   1128       StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
   1129       Store->setAlignment(1);
   1130       Store->setOrdering(Orders[i]);
   1131       Builder.CreateBr(ContBB);
   1132     }
   1133 
   1134     SI->addCase(Builder.getInt32(0), BBs[0]);
   1135     SI->addCase(Builder.getInt32(3), BBs[1]);
   1136     SI->addCase(Builder.getInt32(5), BBs[2]);
   1137 
   1138     Builder.SetInsertPoint(ContBB);
   1139     return RValue::get(0);
   1140   }
   1141 
   1142   case Builtin::BI__atomic_thread_fence:
   1143   case Builtin::BI__atomic_signal_fence:
   1144   case Builtin::BI__c11_atomic_thread_fence:
   1145   case Builtin::BI__c11_atomic_signal_fence: {
   1146     llvm::SynchronizationScope Scope;
   1147     if (BuiltinID == Builtin::BI__atomic_signal_fence ||
   1148         BuiltinID == Builtin::BI__c11_atomic_signal_fence)
   1149       Scope = llvm::SingleThread;
   1150     else
   1151       Scope = llvm::CrossThread;
   1152     Value *Order = EmitScalarExpr(E->getArg(0));
   1153     if (isa<llvm::ConstantInt>(Order)) {
   1154       int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
   1155       switch (ord) {
   1156       case 0:  // memory_order_relaxed
   1157       default: // invalid order
   1158         break;
   1159       case 1:  // memory_order_consume
   1160       case 2:  // memory_order_acquire
   1161         Builder.CreateFence(llvm::Acquire, Scope);
   1162         break;
   1163       case 3:  // memory_order_release
   1164         Builder.CreateFence(llvm::Release, Scope);
   1165         break;
   1166       case 4:  // memory_order_acq_rel
   1167         Builder.CreateFence(llvm::AcquireRelease, Scope);
   1168         break;
   1169       case 5:  // memory_order_seq_cst
   1170         Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
   1171         break;
   1172       }
   1173       return RValue::get(0);
   1174     }
   1175 
   1176     llvm::BasicBlock *AcquireBB, *ReleaseBB, *AcqRelBB, *SeqCstBB;
   1177     AcquireBB = createBasicBlock("acquire", CurFn);
   1178     ReleaseBB = createBasicBlock("release", CurFn);
   1179     AcqRelBB = createBasicBlock("acqrel", CurFn);
   1180     SeqCstBB = createBasicBlock("seqcst", CurFn);
   1181     llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
   1182 
   1183     Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
   1184     llvm::SwitchInst *SI = Builder.CreateSwitch(Order, ContBB);
   1185 
   1186     Builder.SetInsertPoint(AcquireBB);
   1187     Builder.CreateFence(llvm::Acquire, Scope);
   1188     Builder.CreateBr(ContBB);
   1189     SI->addCase(Builder.getInt32(1), AcquireBB);
   1190     SI->addCase(Builder.getInt32(2), AcquireBB);
   1191 
   1192     Builder.SetInsertPoint(ReleaseBB);
   1193     Builder.CreateFence(llvm::Release, Scope);
   1194     Builder.CreateBr(ContBB);
   1195     SI->addCase(Builder.getInt32(3), ReleaseBB);
   1196 
   1197     Builder.SetInsertPoint(AcqRelBB);
   1198     Builder.CreateFence(llvm::AcquireRelease, Scope);
   1199     Builder.CreateBr(ContBB);
   1200     SI->addCase(Builder.getInt32(4), AcqRelBB);
   1201 
   1202     Builder.SetInsertPoint(SeqCstBB);
   1203     Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
   1204     Builder.CreateBr(ContBB);
   1205     SI->addCase(Builder.getInt32(5), SeqCstBB);
   1206 
   1207     Builder.SetInsertPoint(ContBB);
   1208     return RValue::get(0);
   1209   }
   1210 
    // Library functions with special handling.
  case Builtin::BIsqrt:
  case Builtin::BIsqrtf:
  case Builtin::BIsqrtl: {
    // TODO: there is currently no set of optimizer flags
    // sufficient for us to rewrite sqrt to @llvm.sqrt.
    // -fmath-errno=0 is not good enough; we need finiteness.
    // We could probably precondition the call with an ult
    // against 0, but is that worth the complexity?
    break;
  }

  case Builtin::BIpow:
  case Builtin::BIpowf:
  case Builtin::BIpowl: {
    // Rewrite pow to the llvm.pow intrinsic, but only if the function is
    // marked 'const' (so we are allowed to drop the errno side effect).
    if (!FD->hasAttr<ConstAttr>())
      break;
    Value *Base = EmitScalarExpr(E->getArg(0));
    Value *Exponent = EmitScalarExpr(E->getArg(1));
    llvm::Type *ArgType = Base->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::pow, ArgType);
    return RValue::get(Builder.CreateCall2(F, Base, Exponent));
  }
   1235 
  case Builtin::BIfma:
  case Builtin::BIfmaf:
  case Builtin::BIfmal:
  case Builtin::BI__builtin_fma:
  case Builtin::BI__builtin_fmaf:
  case Builtin::BI__builtin_fmal: {
    // Rewrite fma to the llvm.fma intrinsic, overloaded on the
    // floating-point type of the first argument.
    Value *FirstArg = EmitScalarExpr(E->getArg(0));
    llvm::Type *ArgType = FirstArg->getType();
    Value *F = CGM.getIntrinsic(Intrinsic::fma, ArgType);
    return RValue::get(Builder.CreateCall3(F, FirstArg,
                                              EmitScalarExpr(E->getArg(1)),
                                              EmitScalarExpr(E->getArg(2))));
  }
   1250 
  case Builtin::BI__builtin_signbit:
  case Builtin::BI__builtin_signbitf:
  case Builtin::BI__builtin_signbitl: {
    LLVMContext &C = CGM.getLLVMContext();

    Value *Arg = EmitScalarExpr(E->getArg(0));
    llvm::Type *ArgTy = Arg->getType();
    if (ArgTy->isPPC_FP128Ty())
      break; // FIXME: I'm not sure what the right implementation is here.
    int ArgWidth = ArgTy->getPrimitiveSizeInBits();
    // Bitcast the float to a same-width integer; the sign bit is then the
    // integer sign bit, so signbit(x) is just a signed compare against 0.
    llvm::Type *ArgIntTy = llvm::IntegerType::get(C, ArgWidth);
    Value *BCArg = Builder.CreateBitCast(Arg, ArgIntTy);
    Value *ZeroCmp = llvm::Constant::getNullValue(ArgIntTy);
    Value *Result = Builder.CreateICmpSLT(BCArg, ZeroCmp);
    // Zero-extend the i1 result to the builtin's declared int return type.
    return RValue::get(Builder.CreateZExt(Result, ConvertType(E->getType())));
  }
  case Builtin::BI__builtin_annotation: {
    // Lower to llvm.annotation, overloaded on the annotated value's type.
    llvm::Value *AnnVal = EmitScalarExpr(E->getArg(0));
    llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::annotation,
                                      AnnVal->getType());

    // Get the annotation string, go through casts. Sema requires this to be a
    // non-wide string literal, potentially casted, so the cast<> is safe.
    const Expr *AnnotationStrExpr = E->getArg(1)->IgnoreParenCasts();
    llvm::StringRef Str = cast<StringLiteral>(AnnotationStrExpr)->getString();
    return RValue::get(EmitAnnotationCall(F, AnnVal, Str, E->getExprLoc()));
  }
   1278   }
   1279 
   1280   // If this is an alias for a lib function (e.g. __builtin_sin), emit
   1281   // the call using the normal call path, but using the unmangled
   1282   // version of the function name.
   1283   if (getContext().BuiltinInfo.isLibFunction(BuiltinID))
   1284     return emitLibraryCall(*this, FD, E,
   1285                            CGM.getBuiltinLibFunction(FD, BuiltinID));
   1286 
   1287   // If this is a predefined lib function (e.g. malloc), emit the call
   1288   // using exactly the normal call path.
   1289   if (getContext().BuiltinInfo.isPredefinedLibFunction(BuiltinID))
   1290     return emitLibraryCall(*this, FD, E, EmitScalarExpr(E->getCallee()));
   1291 
   1292   // See if we have a target specific intrinsic.
   1293   const char *Name = getContext().BuiltinInfo.GetName(BuiltinID);
   1294   Intrinsic::ID IntrinsicID = Intrinsic::not_intrinsic;
   1295   if (const char *Prefix =
   1296       llvm::Triple::getArchTypePrefix(Target.getTriple().getArch()))
   1297     IntrinsicID = Intrinsic::getIntrinsicForGCCBuiltin(Prefix, Name);
   1298 
   1299   if (IntrinsicID != Intrinsic::not_intrinsic) {
   1300     SmallVector<Value*, 16> Args;
   1301 
   1302     // Find out if any arguments are required to be integer constant
   1303     // expressions.
   1304     unsigned ICEArguments = 0;
   1305     ASTContext::GetBuiltinTypeError Error;
   1306     getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
   1307     assert(Error == ASTContext::GE_None && "Should not codegen an error");
   1308 
   1309     Function *F = CGM.getIntrinsic(IntrinsicID);
   1310     llvm::FunctionType *FTy = F->getFunctionType();
   1311 
   1312     for (unsigned i = 0, e = E->getNumArgs(); i != e; ++i) {
   1313       Value *ArgValue;
   1314       // If this is a normal argument, just emit it as a scalar.
   1315       if ((ICEArguments & (1 << i)) == 0) {
   1316         ArgValue = EmitScalarExpr(E->getArg(i));
   1317       } else {
   1318         // If this is required to be a constant, constant fold it so that we
   1319         // know that the generated intrinsic gets a ConstantInt.
   1320         llvm::APSInt Result;
   1321         bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result,getContext());
   1322         assert(IsConst && "Constant arg isn't actually constant?");
   1323         (void)IsConst;
   1324         ArgValue = llvm::ConstantInt::get(getLLVMContext(), Result);
   1325       }
   1326 
   1327       // If the intrinsic arg type is different from the builtin arg type
   1328       // we need to do a bit cast.
   1329       llvm::Type *PTy = FTy->getParamType(i);
   1330       if (PTy != ArgValue->getType()) {
   1331         assert(PTy->canLosslesslyBitCastTo(FTy->getParamType(i)) &&
   1332                "Must be able to losslessly bit cast to param");
   1333         ArgValue = Builder.CreateBitCast(ArgValue, PTy);
   1334       }
   1335 
   1336       Args.push_back(ArgValue);
   1337     }
   1338 
   1339     Value *V = Builder.CreateCall(F, Args);
   1340     QualType BuiltinRetType = E->getType();
   1341 
   1342     llvm::Type *RetTy = VoidTy;
   1343     if (!BuiltinRetType->isVoidType())
   1344       RetTy = ConvertType(BuiltinRetType);
   1345 
   1346     if (RetTy != V->getType()) {
   1347       assert(V->getType()->canLosslesslyBitCastTo(RetTy) &&
   1348              "Must be able to losslessly bit cast result type");
   1349       V = Builder.CreateBitCast(V, RetTy);
   1350     }
   1351 
   1352     return RValue::get(V);
   1353   }
   1354 
   1355   // See if we have a target specific builtin that needs to be lowered.
   1356   if (Value *V = EmitTargetBuiltinExpr(BuiltinID, E))
   1357     return RValue::get(V);
   1358 
   1359   ErrorUnsupported(E, "builtin function");
   1360 
   1361   // Unknown builtin, for now just dump it out and return undef.
   1362   if (hasAggregateLLVMType(E->getType()))
   1363     return RValue::getAggregate(CreateMemTemp(E->getType()));
   1364   return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
   1365 }
   1366 
   1367 Value *CodeGenFunction::EmitTargetBuiltinExpr(unsigned BuiltinID,
   1368                                               const CallExpr *E) {
   1369   switch (Target.getTriple().getArch()) {
   1370   case llvm::Triple::arm:
   1371   case llvm::Triple::thumb:
   1372     return EmitARMBuiltinExpr(BuiltinID, E);
   1373   case llvm::Triple::x86:
   1374   case llvm::Triple::x86_64:
   1375     return EmitX86BuiltinExpr(BuiltinID, E);
   1376   case llvm::Triple::ppc:
   1377   case llvm::Triple::ppc64:
   1378     return EmitPPCBuiltinExpr(BuiltinID, E);
   1379   case llvm::Triple::hexagon:
   1380     return EmitHexagonBuiltinExpr(BuiltinID, E);
   1381   default:
   1382     return 0;
   1383   }
   1384 }
   1385 
   1386 static llvm::VectorType *GetNeonType(CodeGenFunction *CGF,
   1387                                      NeonTypeFlags TypeFlags) {
   1388   int IsQuad = TypeFlags.isQuad();
   1389   switch (TypeFlags.getEltType()) {
   1390   case NeonTypeFlags::Int8:
   1391   case NeonTypeFlags::Poly8:
   1392     return llvm::VectorType::get(CGF->Int8Ty, 8 << IsQuad);
   1393   case NeonTypeFlags::Int16:
   1394   case NeonTypeFlags::Poly16:
   1395   case NeonTypeFlags::Float16:
   1396     return llvm::VectorType::get(CGF->Int16Ty, 4 << IsQuad);
   1397   case NeonTypeFlags::Int32:
   1398     return llvm::VectorType::get(CGF->Int32Ty, 2 << IsQuad);
   1399   case NeonTypeFlags::Int64:
   1400     return llvm::VectorType::get(CGF->Int64Ty, 1 << IsQuad);
   1401   case NeonTypeFlags::Float32:
   1402     return llvm::VectorType::get(CGF->FloatTy, 2 << IsQuad);
   1403   }
   1404   llvm_unreachable("Invalid NeonTypeFlags element type!");
   1405 }
   1406 
   1407 Value *CodeGenFunction::EmitNeonSplat(Value *V, Constant *C) {
   1408   unsigned nElts = cast<llvm::VectorType>(V->getType())->getNumElements();
   1409   Value* SV = llvm::ConstantVector::getSplat(nElts, C);
   1410   return Builder.CreateShuffleVector(V, V, SV, "lane");
   1411 }
   1412 
   1413 Value *CodeGenFunction::EmitNeonCall(Function *F, SmallVectorImpl<Value*> &Ops,
   1414                                      const char *name,
   1415                                      unsigned shift, bool rightshift) {
   1416   unsigned j = 0;
   1417   for (Function::const_arg_iterator ai = F->arg_begin(), ae = F->arg_end();
   1418        ai != ae; ++ai, ++j)
   1419     if (shift > 0 && shift == j)
   1420       Ops[j] = EmitNeonShiftVector(Ops[j], ai->getType(), rightshift);
   1421     else
   1422       Ops[j] = Builder.CreateBitCast(Ops[j], ai->getType(), name);
   1423 
   1424   return Builder.CreateCall(F, Ops, name);
   1425 }
   1426 
   1427 Value *CodeGenFunction::EmitNeonShiftVector(Value *V, llvm::Type *Ty,
   1428                                             bool neg) {
   1429   int SV = cast<ConstantInt>(V)->getSExtValue();
   1430 
   1431   llvm::VectorType *VTy = cast<llvm::VectorType>(Ty);
   1432   llvm::Constant *C = ConstantInt::get(VTy->getElementType(), neg ? -SV : SV);
   1433   return llvm::ConstantVector::getSplat(VTy->getNumElements(), C);
   1434 }
   1435 
   1436 /// GetPointeeAlignment - Given an expression with a pointer type, find the
   1437 /// alignment of the type referenced by the pointer.  Skip over implicit
   1438 /// casts.
   1439 unsigned CodeGenFunction::GetPointeeAlignment(const Expr *Addr) {
   1440   unsigned Align = 1;
   1441   // Check if the type is a pointer.  The implicit cast operand might not be.
   1442   while (Addr->getType()->isPointerType()) {
   1443     QualType PtTy = Addr->getType()->getPointeeType();
   1444 
   1445     // Can't get alignment of incomplete types.
   1446     if (!PtTy->isIncompleteType()) {
   1447       unsigned NewA = getContext().getTypeAlignInChars(PtTy).getQuantity();
   1448       if (NewA > Align)
   1449         Align = NewA;
   1450     }
   1451 
   1452     // If the address is an implicit cast, repeat with the cast operand.
   1453     if (const ImplicitCastExpr *CastAddr = dyn_cast<ImplicitCastExpr>(Addr)) {
   1454       Addr = CastAddr->getSubExpr();
   1455       continue;
   1456     }
   1457     break;
   1458   }
   1459   return Align;
   1460 }
   1461 
   1462 /// GetPointeeAlignmentValue - Given an expression with a pointer type, find
   1463 /// the alignment of the type referenced by the pointer.  Skip over implicit
   1464 /// casts.  Return the alignment as an llvm::Value.
   1465 Value *CodeGenFunction::GetPointeeAlignmentValue(const Expr *Addr) {
   1466   return llvm::ConstantInt::get(Int32Ty, GetPointeeAlignment(Addr));
   1467 }
   1468 
   1469 Value *CodeGenFunction::EmitARMBuiltinExpr(unsigned BuiltinID,
   1470                                            const CallExpr *E) {
   1471   if (BuiltinID == ARM::BI__clear_cache) {
   1472     const FunctionDecl *FD = E->getDirectCallee();
   1473     // Oddly people write this call without args on occasion and gcc accepts
   1474     // it - it's also marked as varargs in the description file.
   1475     SmallVector<Value*, 2> Ops;
   1476     for (unsigned i = 0; i < E->getNumArgs(); i++)
   1477       Ops.push_back(EmitScalarExpr(E->getArg(i)));
   1478     llvm::Type *Ty = CGM.getTypes().ConvertType(FD->getType());
   1479     llvm::FunctionType *FTy = cast<llvm::FunctionType>(Ty);
   1480     StringRef Name = FD->getName();
   1481     return Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name), Ops);
   1482   }
   1483 
   1484   if (BuiltinID == ARM::BI__builtin_arm_ldrexd) {
   1485     Function *F = CGM.getIntrinsic(Intrinsic::arm_ldrexd);
   1486 
   1487     Value *LdPtr = EmitScalarExpr(E->getArg(0));
   1488     Value *Val = Builder.CreateCall(F, LdPtr, "ldrexd");
   1489 
   1490     Value *Val0 = Builder.CreateExtractValue(Val, 1);
   1491     Value *Val1 = Builder.CreateExtractValue(Val, 0);
   1492     Val0 = Builder.CreateZExt(Val0, Int64Ty);
   1493     Val1 = Builder.CreateZExt(Val1, Int64Ty);
   1494 
   1495     Value *ShiftCst = llvm::ConstantInt::get(Int64Ty, 32);
   1496     Val = Builder.CreateShl(Val0, ShiftCst, "shl", true /* nuw */);
   1497     return Builder.CreateOr(Val, Val1);
   1498   }
   1499 
   1500   if (BuiltinID == ARM::BI__builtin_arm_strexd) {
   1501     Function *F = CGM.getIntrinsic(Intrinsic::arm_strexd);
   1502     llvm::Type *STy = llvm::StructType::get(Int32Ty, Int32Ty, NULL);
   1503 
   1504     Value *One = llvm::ConstantInt::get(Int32Ty, 1);
   1505     Value *Tmp = Builder.CreateAlloca(Int64Ty, One);
   1506     Value *Val = EmitScalarExpr(E->getArg(0));
   1507     Builder.CreateStore(Val, Tmp);
   1508 
   1509     Value *LdPtr = Builder.CreateBitCast(Tmp,llvm::PointerType::getUnqual(STy));
   1510     Val = Builder.CreateLoad(LdPtr);
   1511 
   1512     Value *Arg0 = Builder.CreateExtractValue(Val, 0);
   1513     Value *Arg1 = Builder.CreateExtractValue(Val, 1);
   1514     Value *StPtr = EmitScalarExpr(E->getArg(1));
   1515     return Builder.CreateCall3(F, Arg0, Arg1, StPtr, "strexd");
   1516   }
   1517 
   1518   SmallVector<Value*, 4> Ops;
   1519   for (unsigned i = 0, e = E->getNumArgs() - 1; i != e; i++)
   1520     Ops.push_back(EmitScalarExpr(E->getArg(i)));
   1521 
   1522   // vget_lane and vset_lane are not overloaded and do not have an extra
   1523   // argument that specifies the vector type.
   1524   switch (BuiltinID) {
   1525   default: break;
   1526   case ARM::BI__builtin_neon_vget_lane_i8:
   1527   case ARM::BI__builtin_neon_vget_lane_i16:
   1528   case ARM::BI__builtin_neon_vget_lane_i32:
   1529   case ARM::BI__builtin_neon_vget_lane_i64:
   1530   case ARM::BI__builtin_neon_vget_lane_f32:
   1531   case ARM::BI__builtin_neon_vgetq_lane_i8:
   1532   case ARM::BI__builtin_neon_vgetq_lane_i16:
   1533   case ARM::BI__builtin_neon_vgetq_lane_i32:
   1534   case ARM::BI__builtin_neon_vgetq_lane_i64:
   1535   case ARM::BI__builtin_neon_vgetq_lane_f32:
   1536     return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
   1537                                         "vget_lane");
   1538   case ARM::BI__builtin_neon_vset_lane_i8:
   1539   case ARM::BI__builtin_neon_vset_lane_i16:
   1540   case ARM::BI__builtin_neon_vset_lane_i32:
   1541   case ARM::BI__builtin_neon_vset_lane_i64:
   1542   case ARM::BI__builtin_neon_vset_lane_f32:
   1543   case ARM::BI__builtin_neon_vsetq_lane_i8:
   1544   case ARM::BI__builtin_neon_vsetq_lane_i16:
   1545   case ARM::BI__builtin_neon_vsetq_lane_i32:
   1546   case ARM::BI__builtin_neon_vsetq_lane_i64:
   1547   case ARM::BI__builtin_neon_vsetq_lane_f32:
   1548     Ops.push_back(EmitScalarExpr(E->getArg(2)));
   1549     return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
   1550   }
   1551 
   1552   // Get the last argument, which specifies the vector type.
   1553   llvm::APSInt Result;
   1554   const Expr *Arg = E->getArg(E->getNumArgs()-1);
   1555   if (!Arg->isIntegerConstantExpr(Result, getContext()))
   1556     return 0;
   1557 
   1558   if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f ||
   1559       BuiltinID == ARM::BI__builtin_arm_vcvtr_d) {
   1560     // Determine the overloaded type of this builtin.
   1561     llvm::Type *Ty;
   1562     if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f)
   1563       Ty = FloatTy;
   1564     else
   1565       Ty = DoubleTy;
   1566 
   1567     // Determine whether this is an unsigned conversion or not.
   1568     bool usgn = Result.getZExtValue() == 1;
   1569     unsigned Int = usgn ? Intrinsic::arm_vcvtru : Intrinsic::arm_vcvtr;
   1570 
   1571     // Call the appropriate intrinsic.
   1572     Function *F = CGM.getIntrinsic(Int, Ty);
   1573     return Builder.CreateCall(F, Ops, "vcvtr");
   1574   }
   1575 
   1576   // Determine the type of this overloaded NEON intrinsic.
   1577   NeonTypeFlags Type(Result.getZExtValue());
   1578   bool usgn = Type.isUnsigned();
   1579   bool quad = Type.isQuad();
   1580   bool rightShift = false;
   1581 
   1582   llvm::VectorType *VTy = GetNeonType(this, Type);
   1583   llvm::Type *Ty = VTy;
   1584   if (!Ty)
   1585     return 0;
   1586 
   1587   unsigned Int;
   1588   switch (BuiltinID) {
   1589   default: return 0;
   1590   case ARM::BI__builtin_neon_vabd_v:
   1591   case ARM::BI__builtin_neon_vabdq_v:
   1592     Int = usgn ? Intrinsic::arm_neon_vabdu : Intrinsic::arm_neon_vabds;
   1593     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vabd");
   1594   case ARM::BI__builtin_neon_vabs_v:
   1595   case ARM::BI__builtin_neon_vabsq_v:
   1596     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vabs, Ty),
   1597                         Ops, "vabs");
   1598   case ARM::BI__builtin_neon_vaddhn_v:
   1599     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vaddhn, Ty),
   1600                         Ops, "vaddhn");
   1601   case ARM::BI__builtin_neon_vcale_v:
   1602     std::swap(Ops[0], Ops[1]);
   1603   case ARM::BI__builtin_neon_vcage_v: {
   1604     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacged);
   1605     return EmitNeonCall(F, Ops, "vcage");
   1606   }
   1607   case ARM::BI__builtin_neon_vcaleq_v:
   1608     std::swap(Ops[0], Ops[1]);
   1609   case ARM::BI__builtin_neon_vcageq_v: {
   1610     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgeq);
   1611     return EmitNeonCall(F, Ops, "vcage");
   1612   }
   1613   case ARM::BI__builtin_neon_vcalt_v:
   1614     std::swap(Ops[0], Ops[1]);
   1615   case ARM::BI__builtin_neon_vcagt_v: {
   1616     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgtd);
   1617     return EmitNeonCall(F, Ops, "vcagt");
   1618   }
   1619   case ARM::BI__builtin_neon_vcaltq_v:
   1620     std::swap(Ops[0], Ops[1]);
   1621   case ARM::BI__builtin_neon_vcagtq_v: {
   1622     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgtq);
   1623     return EmitNeonCall(F, Ops, "vcagt");
   1624   }
   1625   case ARM::BI__builtin_neon_vcls_v:
   1626   case ARM::BI__builtin_neon_vclsq_v: {
   1627     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcls, Ty);
   1628     return EmitNeonCall(F, Ops, "vcls");
   1629   }
   1630   case ARM::BI__builtin_neon_vclz_v:
   1631   case ARM::BI__builtin_neon_vclzq_v: {
   1632     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vclz, Ty);
   1633     return EmitNeonCall(F, Ops, "vclz");
   1634   }
   1635   case ARM::BI__builtin_neon_vcnt_v:
   1636   case ARM::BI__builtin_neon_vcntq_v: {
   1637     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcnt, Ty);
   1638     return EmitNeonCall(F, Ops, "vcnt");
   1639   }
   1640   case ARM::BI__builtin_neon_vcvt_f16_v: {
   1641     assert(Type.getEltType() == NeonTypeFlags::Float16 && !quad &&
   1642            "unexpected vcvt_f16_v builtin");
   1643     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcvtfp2hf);
   1644     return EmitNeonCall(F, Ops, "vcvt");
   1645   }
   1646   case ARM::BI__builtin_neon_vcvt_f32_f16: {
   1647     assert(Type.getEltType() == NeonTypeFlags::Float16 && !quad &&
   1648            "unexpected vcvt_f32_f16 builtin");
   1649     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcvthf2fp);
   1650     return EmitNeonCall(F, Ops, "vcvt");
   1651   }
   1652   case ARM::BI__builtin_neon_vcvt_f32_v:
   1653   case ARM::BI__builtin_neon_vcvtq_f32_v:
   1654     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1655     Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
   1656     return usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
   1657                 : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
   1658   case ARM::BI__builtin_neon_vcvt_s32_v:
   1659   case ARM::BI__builtin_neon_vcvt_u32_v:
   1660   case ARM::BI__builtin_neon_vcvtq_s32_v:
   1661   case ARM::BI__builtin_neon_vcvtq_u32_v: {
   1662     llvm::Type *FloatTy =
   1663       GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
   1664     Ops[0] = Builder.CreateBitCast(Ops[0], FloatTy);
   1665     return usgn ? Builder.CreateFPToUI(Ops[0], Ty, "vcvt")
   1666                 : Builder.CreateFPToSI(Ops[0], Ty, "vcvt");
   1667   }
   1668   case ARM::BI__builtin_neon_vcvt_n_f32_v:
   1669   case ARM::BI__builtin_neon_vcvtq_n_f32_v: {
   1670     llvm::Type *FloatTy =
   1671       GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
   1672     llvm::Type *Tys[2] = { FloatTy, Ty };
   1673     Int = usgn ? Intrinsic::arm_neon_vcvtfxu2fp
   1674                : Intrinsic::arm_neon_vcvtfxs2fp;
   1675     Function *F = CGM.getIntrinsic(Int, Tys);
   1676     return EmitNeonCall(F, Ops, "vcvt_n");
   1677   }
   1678   case ARM::BI__builtin_neon_vcvt_n_s32_v:
   1679   case ARM::BI__builtin_neon_vcvt_n_u32_v:
   1680   case ARM::BI__builtin_neon_vcvtq_n_s32_v:
   1681   case ARM::BI__builtin_neon_vcvtq_n_u32_v: {
   1682     llvm::Type *FloatTy =
   1683       GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
   1684     llvm::Type *Tys[2] = { Ty, FloatTy };
   1685     Int = usgn ? Intrinsic::arm_neon_vcvtfp2fxu
   1686                : Intrinsic::arm_neon_vcvtfp2fxs;
   1687     Function *F = CGM.getIntrinsic(Int, Tys);
   1688     return EmitNeonCall(F, Ops, "vcvt_n");
   1689   }
   1690   case ARM::BI__builtin_neon_vext_v:
   1691   case ARM::BI__builtin_neon_vextq_v: {
   1692     int CV = cast<ConstantInt>(Ops[2])->getSExtValue();
   1693     SmallVector<Constant*, 16> Indices;
   1694     for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
   1695       Indices.push_back(ConstantInt::get(Int32Ty, i+CV));
   1696 
   1697     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1698     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   1699     Value *SV = llvm::ConstantVector::get(Indices);
   1700     return Builder.CreateShuffleVector(Ops[0], Ops[1], SV, "vext");
   1701   }
   1702   case ARM::BI__builtin_neon_vhadd_v:
   1703   case ARM::BI__builtin_neon_vhaddq_v:
   1704     Int = usgn ? Intrinsic::arm_neon_vhaddu : Intrinsic::arm_neon_vhadds;
   1705     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vhadd");
   1706   case ARM::BI__builtin_neon_vhsub_v:
   1707   case ARM::BI__builtin_neon_vhsubq_v:
   1708     Int = usgn ? Intrinsic::arm_neon_vhsubu : Intrinsic::arm_neon_vhsubs;
   1709     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vhsub");
   1710   case ARM::BI__builtin_neon_vld1_v:
   1711   case ARM::BI__builtin_neon_vld1q_v:
   1712     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   1713     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vld1, Ty),
   1714                         Ops, "vld1");
   1715   case ARM::BI__builtin_neon_vld1_lane_v:
   1716   case ARM::BI__builtin_neon_vld1q_lane_v: {
   1717     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   1718     Ty = llvm::PointerType::getUnqual(VTy->getElementType());
   1719     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1720     LoadInst *Ld = Builder.CreateLoad(Ops[0]);
   1721     Value *Align = GetPointeeAlignmentValue(E->getArg(0));
   1722     Ld->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
   1723     return Builder.CreateInsertElement(Ops[1], Ld, Ops[2], "vld1_lane");
   1724   }
   1725   case ARM::BI__builtin_neon_vld1_dup_v:
   1726   case ARM::BI__builtin_neon_vld1q_dup_v: {
   1727     Value *V = UndefValue::get(Ty);
   1728     Ty = llvm::PointerType::getUnqual(VTy->getElementType());
   1729     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1730     LoadInst *Ld = Builder.CreateLoad(Ops[0]);
   1731     Value *Align = GetPointeeAlignmentValue(E->getArg(0));
   1732     Ld->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
   1733     llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
   1734     Ops[0] = Builder.CreateInsertElement(V, Ld, CI);
   1735     return EmitNeonSplat(Ops[0], CI);
   1736   }
   1737   case ARM::BI__builtin_neon_vld2_v:
   1738   case ARM::BI__builtin_neon_vld2q_v: {
   1739     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld2, Ty);
   1740     Value *Align = GetPointeeAlignmentValue(E->getArg(1));
   1741     Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld2");
   1742     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1743     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1744     return Builder.CreateStore(Ops[1], Ops[0]);
   1745   }
   1746   case ARM::BI__builtin_neon_vld3_v:
   1747   case ARM::BI__builtin_neon_vld3q_v: {
   1748     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld3, Ty);
   1749     Value *Align = GetPointeeAlignmentValue(E->getArg(1));
   1750     Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld3");
   1751     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1752     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1753     return Builder.CreateStore(Ops[1], Ops[0]);
   1754   }
   1755   case ARM::BI__builtin_neon_vld4_v:
   1756   case ARM::BI__builtin_neon_vld4q_v: {
   1757     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld4, Ty);
   1758     Value *Align = GetPointeeAlignmentValue(E->getArg(1));
   1759     Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld4");
   1760     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1761     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1762     return Builder.CreateStore(Ops[1], Ops[0]);
   1763   }
   1764   case ARM::BI__builtin_neon_vld2_lane_v:
   1765   case ARM::BI__builtin_neon_vld2q_lane_v: {
   1766     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld2lane, Ty);
   1767     Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
   1768     Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
   1769     Ops.push_back(GetPointeeAlignmentValue(E->getArg(1)));
   1770     Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld2_lane");
   1771     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1772     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1773     return Builder.CreateStore(Ops[1], Ops[0]);
   1774   }
   1775   case ARM::BI__builtin_neon_vld3_lane_v:
   1776   case ARM::BI__builtin_neon_vld3q_lane_v: {
   1777     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld3lane, Ty);
   1778     Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
   1779     Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
   1780     Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
   1781     Ops.push_back(GetPointeeAlignmentValue(E->getArg(1)));
   1782     Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
   1783     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1784     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1785     return Builder.CreateStore(Ops[1], Ops[0]);
   1786   }
   1787   case ARM::BI__builtin_neon_vld4_lane_v:
   1788   case ARM::BI__builtin_neon_vld4q_lane_v: {
   1789     Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld4lane, Ty);
   1790     Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
   1791     Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
   1792     Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
   1793     Ops[5] = Builder.CreateBitCast(Ops[5], Ty);
   1794     Ops.push_back(GetPointeeAlignmentValue(E->getArg(1)));
   1795     Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
   1796     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1797     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1798     return Builder.CreateStore(Ops[1], Ops[0]);
   1799   }
   1800   case ARM::BI__builtin_neon_vld2_dup_v:
   1801   case ARM::BI__builtin_neon_vld3_dup_v:
   1802   case ARM::BI__builtin_neon_vld4_dup_v: {
   1803     // Handle 64-bit elements as a special-case.  There is no "dup" needed.
   1804     if (VTy->getElementType()->getPrimitiveSizeInBits() == 64) {
   1805       switch (BuiltinID) {
   1806       case ARM::BI__builtin_neon_vld2_dup_v:
   1807         Int = Intrinsic::arm_neon_vld2;
   1808         break;
   1809       case ARM::BI__builtin_neon_vld3_dup_v:
   1810         Int = Intrinsic::arm_neon_vld3;
   1811         break;
   1812       case ARM::BI__builtin_neon_vld4_dup_v:
   1813         Int = Intrinsic::arm_neon_vld4;
   1814         break;
   1815       default: llvm_unreachable("unknown vld_dup intrinsic?");
   1816       }
   1817       Function *F = CGM.getIntrinsic(Int, Ty);
   1818       Value *Align = GetPointeeAlignmentValue(E->getArg(1));
   1819       Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld_dup");
   1820       Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1821       Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1822       return Builder.CreateStore(Ops[1], Ops[0]);
   1823     }
   1824     switch (BuiltinID) {
   1825     case ARM::BI__builtin_neon_vld2_dup_v:
   1826       Int = Intrinsic::arm_neon_vld2lane;
   1827       break;
   1828     case ARM::BI__builtin_neon_vld3_dup_v:
   1829       Int = Intrinsic::arm_neon_vld3lane;
   1830       break;
   1831     case ARM::BI__builtin_neon_vld4_dup_v:
   1832       Int = Intrinsic::arm_neon_vld4lane;
   1833       break;
   1834     default: llvm_unreachable("unknown vld_dup intrinsic?");
   1835     }
   1836     Function *F = CGM.getIntrinsic(Int, Ty);
   1837     llvm::StructType *STy = cast<llvm::StructType>(F->getReturnType());
   1838 
   1839     SmallVector<Value*, 6> Args;
   1840     Args.push_back(Ops[1]);
   1841     Args.append(STy->getNumElements(), UndefValue::get(Ty));
   1842 
   1843     llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
   1844     Args.push_back(CI);
   1845     Args.push_back(GetPointeeAlignmentValue(E->getArg(1)));
   1846 
   1847     Ops[1] = Builder.CreateCall(F, Args, "vld_dup");
   1848     // splat lane 0 to all elts in each vector of the result.
   1849     for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
   1850       Value *Val = Builder.CreateExtractValue(Ops[1], i);
   1851       Value *Elt = Builder.CreateBitCast(Val, Ty);
   1852       Elt = EmitNeonSplat(Elt, CI);
   1853       Elt = Builder.CreateBitCast(Elt, Val->getType());
   1854       Ops[1] = Builder.CreateInsertValue(Ops[1], Elt, i);
   1855     }
   1856     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   1857     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   1858     return Builder.CreateStore(Ops[1], Ops[0]);
   1859   }
   1860   case ARM::BI__builtin_neon_vmax_v:
   1861   case ARM::BI__builtin_neon_vmaxq_v:
   1862     Int = usgn ? Intrinsic::arm_neon_vmaxu : Intrinsic::arm_neon_vmaxs;
   1863     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmax");
   1864   case ARM::BI__builtin_neon_vmin_v:
   1865   case ARM::BI__builtin_neon_vminq_v:
   1866     Int = usgn ? Intrinsic::arm_neon_vminu : Intrinsic::arm_neon_vmins;
   1867     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmin");
   1868   case ARM::BI__builtin_neon_vmovl_v: {
   1869     llvm::Type *DTy =llvm::VectorType::getTruncatedElementVectorType(VTy);
   1870     Ops[0] = Builder.CreateBitCast(Ops[0], DTy);
   1871     if (usgn)
   1872       return Builder.CreateZExt(Ops[0], Ty, "vmovl");
   1873     return Builder.CreateSExt(Ops[0], Ty, "vmovl");
   1874   }
   1875   case ARM::BI__builtin_neon_vmovn_v: {
   1876     llvm::Type *QTy = llvm::VectorType::getExtendedElementVectorType(VTy);
   1877     Ops[0] = Builder.CreateBitCast(Ops[0], QTy);
   1878     return Builder.CreateTrunc(Ops[0], Ty, "vmovn");
   1879   }
   1880   case ARM::BI__builtin_neon_vmul_v:
   1881   case ARM::BI__builtin_neon_vmulq_v:
   1882     assert(Type.isPoly() && "vmul builtin only supported for polynomial types");
   1883     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vmulp, Ty),
   1884                         Ops, "vmul");
   1885   case ARM::BI__builtin_neon_vmull_v:
   1886     Int = usgn ? Intrinsic::arm_neon_vmullu : Intrinsic::arm_neon_vmulls;
   1887     Int = Type.isPoly() ? (unsigned)Intrinsic::arm_neon_vmullp : Int;
   1888     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull");
   1889   case ARM::BI__builtin_neon_vpadal_v:
   1890   case ARM::BI__builtin_neon_vpadalq_v: {
   1891     Int = usgn ? Intrinsic::arm_neon_vpadalu : Intrinsic::arm_neon_vpadals;
   1892     // The source operand type has twice as many elements of half the size.
   1893     unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
   1894     llvm::Type *EltTy =
   1895       llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
   1896     llvm::Type *NarrowTy =
   1897       llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
   1898     llvm::Type *Tys[2] = { Ty, NarrowTy };
   1899     return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpadal");
   1900   }
   1901   case ARM::BI__builtin_neon_vpadd_v:
   1902     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vpadd, Ty),
   1903                         Ops, "vpadd");
   1904   case ARM::BI__builtin_neon_vpaddl_v:
   1905   case ARM::BI__builtin_neon_vpaddlq_v: {
   1906     Int = usgn ? Intrinsic::arm_neon_vpaddlu : Intrinsic::arm_neon_vpaddls;
   1907     // The source operand type has twice as many elements of half the size.
   1908     unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
   1909     llvm::Type *EltTy = llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
   1910     llvm::Type *NarrowTy =
   1911       llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
   1912     llvm::Type *Tys[2] = { Ty, NarrowTy };
   1913     return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpaddl");
   1914   }
   1915   case ARM::BI__builtin_neon_vpmax_v:
   1916     Int = usgn ? Intrinsic::arm_neon_vpmaxu : Intrinsic::arm_neon_vpmaxs;
   1917     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmax");
   1918   case ARM::BI__builtin_neon_vpmin_v:
   1919     Int = usgn ? Intrinsic::arm_neon_vpminu : Intrinsic::arm_neon_vpmins;
   1920     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmin");
   1921   case ARM::BI__builtin_neon_vqabs_v:
   1922   case ARM::BI__builtin_neon_vqabsq_v:
   1923     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqabs, Ty),
   1924                         Ops, "vqabs");
   1925   case ARM::BI__builtin_neon_vqadd_v:
   1926   case ARM::BI__builtin_neon_vqaddq_v:
   1927     Int = usgn ? Intrinsic::arm_neon_vqaddu : Intrinsic::arm_neon_vqadds;
   1928     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqadd");
   1929   case ARM::BI__builtin_neon_vqdmlal_v:
   1930     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmlal, Ty),
   1931                         Ops, "vqdmlal");
   1932   case ARM::BI__builtin_neon_vqdmlsl_v:
   1933     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmlsl, Ty),
   1934                         Ops, "vqdmlsl");
   1935   case ARM::BI__builtin_neon_vqdmulh_v:
   1936   case ARM::BI__builtin_neon_vqdmulhq_v:
   1937     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmulh, Ty),
   1938                         Ops, "vqdmulh");
   1939   case ARM::BI__builtin_neon_vqdmull_v:
   1940     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmull, Ty),
   1941                         Ops, "vqdmull");
   1942   case ARM::BI__builtin_neon_vqmovn_v:
   1943     Int = usgn ? Intrinsic::arm_neon_vqmovnu : Intrinsic::arm_neon_vqmovns;
   1944     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqmovn");
   1945   case ARM::BI__builtin_neon_vqmovun_v:
   1946     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqmovnsu, Ty),
   1947                         Ops, "vqdmull");
   1948   case ARM::BI__builtin_neon_vqneg_v:
   1949   case ARM::BI__builtin_neon_vqnegq_v:
   1950     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqneg, Ty),
   1951                         Ops, "vqneg");
   1952   case ARM::BI__builtin_neon_vqrdmulh_v:
   1953   case ARM::BI__builtin_neon_vqrdmulhq_v:
   1954     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrdmulh, Ty),
   1955                         Ops, "vqrdmulh");
   1956   case ARM::BI__builtin_neon_vqrshl_v:
   1957   case ARM::BI__builtin_neon_vqrshlq_v:
   1958     Int = usgn ? Intrinsic::arm_neon_vqrshiftu : Intrinsic::arm_neon_vqrshifts;
   1959     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshl");
   1960   case ARM::BI__builtin_neon_vqrshrn_n_v:
   1961     Int = usgn ? Intrinsic::arm_neon_vqrshiftnu : Intrinsic::arm_neon_vqrshiftns;
   1962     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n",
   1963                         1, true);
   1964   case ARM::BI__builtin_neon_vqrshrun_n_v:
   1965     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrshiftnsu, Ty),
   1966                         Ops, "vqrshrun_n", 1, true);
   1967   case ARM::BI__builtin_neon_vqshl_v:
   1968   case ARM::BI__builtin_neon_vqshlq_v:
   1969     Int = usgn ? Intrinsic::arm_neon_vqshiftu : Intrinsic::arm_neon_vqshifts;
   1970     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl");
   1971   case ARM::BI__builtin_neon_vqshl_n_v:
   1972   case ARM::BI__builtin_neon_vqshlq_n_v:
   1973     Int = usgn ? Intrinsic::arm_neon_vqshiftu : Intrinsic::arm_neon_vqshifts;
   1974     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl_n",
   1975                         1, false);
   1976   case ARM::BI__builtin_neon_vqshlu_n_v:
   1977   case ARM::BI__builtin_neon_vqshluq_n_v:
   1978     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftsu, Ty),
   1979                         Ops, "vqshlu", 1, false);
   1980   case ARM::BI__builtin_neon_vqshrn_n_v:
   1981     Int = usgn ? Intrinsic::arm_neon_vqshiftnu : Intrinsic::arm_neon_vqshiftns;
   1982     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n",
   1983                         1, true);
   1984   case ARM::BI__builtin_neon_vqshrun_n_v:
   1985     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftnsu, Ty),
   1986                         Ops, "vqshrun_n", 1, true);
   1987   case ARM::BI__builtin_neon_vqsub_v:
   1988   case ARM::BI__builtin_neon_vqsubq_v:
   1989     Int = usgn ? Intrinsic::arm_neon_vqsubu : Intrinsic::arm_neon_vqsubs;
   1990     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqsub");
   1991   case ARM::BI__builtin_neon_vraddhn_v:
   1992     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vraddhn, Ty),
   1993                         Ops, "vraddhn");
   1994   case ARM::BI__builtin_neon_vrecpe_v:
   1995   case ARM::BI__builtin_neon_vrecpeq_v:
   1996     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecpe, Ty),
   1997                         Ops, "vrecpe");
   1998   case ARM::BI__builtin_neon_vrecps_v:
   1999   case ARM::BI__builtin_neon_vrecpsq_v:
   2000     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecps, Ty),
   2001                         Ops, "vrecps");
   2002   case ARM::BI__builtin_neon_vrhadd_v:
   2003   case ARM::BI__builtin_neon_vrhaddq_v:
   2004     Int = usgn ? Intrinsic::arm_neon_vrhaddu : Intrinsic::arm_neon_vrhadds;
   2005     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrhadd");
   2006   case ARM::BI__builtin_neon_vrshl_v:
   2007   case ARM::BI__builtin_neon_vrshlq_v:
   2008     Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
   2009     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshl");
   2010   case ARM::BI__builtin_neon_vrshrn_n_v:
   2011     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrshiftn, Ty),
   2012                         Ops, "vrshrn_n", 1, true);
   2013   case ARM::BI__builtin_neon_vrshr_n_v:
   2014   case ARM::BI__builtin_neon_vrshrq_n_v:
   2015     Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
   2016     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshr_n", 1, true);
   2017   case ARM::BI__builtin_neon_vrsqrte_v:
   2018   case ARM::BI__builtin_neon_vrsqrteq_v:
   2019     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsqrte, Ty),
   2020                         Ops, "vrsqrte");
   2021   case ARM::BI__builtin_neon_vrsqrts_v:
   2022   case ARM::BI__builtin_neon_vrsqrtsq_v:
   2023     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsqrts, Ty),
   2024                         Ops, "vrsqrts");
   2025   case ARM::BI__builtin_neon_vrsra_n_v:
   2026   case ARM::BI__builtin_neon_vrsraq_n_v:
   2027     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   2028     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   2029     Ops[2] = EmitNeonShiftVector(Ops[2], Ty, true);
   2030     Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
   2031     Ops[1] = Builder.CreateCall2(CGM.getIntrinsic(Int, Ty), Ops[1], Ops[2]);
   2032     return Builder.CreateAdd(Ops[0], Ops[1], "vrsra_n");
   2033   case ARM::BI__builtin_neon_vrsubhn_v:
   2034     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsubhn, Ty),
   2035                         Ops, "vrsubhn");
   2036   case ARM::BI__builtin_neon_vshl_v:
   2037   case ARM::BI__builtin_neon_vshlq_v:
   2038     Int = usgn ? Intrinsic::arm_neon_vshiftu : Intrinsic::arm_neon_vshifts;
   2039     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vshl");
   2040   case ARM::BI__builtin_neon_vshll_n_v:
   2041     Int = usgn ? Intrinsic::arm_neon_vshiftlu : Intrinsic::arm_neon_vshiftls;
   2042     return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vshll", 1);
   2043   case ARM::BI__builtin_neon_vshl_n_v:
   2044   case ARM::BI__builtin_neon_vshlq_n_v:
   2045     Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
   2046     return Builder.CreateShl(Builder.CreateBitCast(Ops[0],Ty), Ops[1], "vshl_n");
   2047   case ARM::BI__builtin_neon_vshrn_n_v:
   2048     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftn, Ty),
   2049                         Ops, "vshrn_n", 1, true);
   2050   case ARM::BI__builtin_neon_vshr_n_v:
   2051   case ARM::BI__builtin_neon_vshrq_n_v:
   2052     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   2053     Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
   2054     if (usgn)
   2055       return Builder.CreateLShr(Ops[0], Ops[1], "vshr_n");
   2056     else
   2057       return Builder.CreateAShr(Ops[0], Ops[1], "vshr_n");
   2058   case ARM::BI__builtin_neon_vsri_n_v:
   2059   case ARM::BI__builtin_neon_vsriq_n_v:
   2060     rightShift = true;
   2061   case ARM::BI__builtin_neon_vsli_n_v:
   2062   case ARM::BI__builtin_neon_vsliq_n_v:
   2063     Ops[2] = EmitNeonShiftVector(Ops[2], Ty, rightShift);
   2064     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftins, Ty),
   2065                         Ops, "vsli_n");
   2066   case ARM::BI__builtin_neon_vsra_n_v:
   2067   case ARM::BI__builtin_neon_vsraq_n_v:
   2068     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   2069     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   2070     Ops[2] = EmitNeonShiftVector(Ops[2], Ty, false);
   2071     if (usgn)
   2072       Ops[1] = Builder.CreateLShr(Ops[1], Ops[2], "vsra_n");
   2073     else
   2074       Ops[1] = Builder.CreateAShr(Ops[1], Ops[2], "vsra_n");
   2075     return Builder.CreateAdd(Ops[0], Ops[1]);
   2076   case ARM::BI__builtin_neon_vst1_v:
   2077   case ARM::BI__builtin_neon_vst1q_v:
   2078     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   2079     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst1, Ty),
   2080                         Ops, "");
   2081   case ARM::BI__builtin_neon_vst1_lane_v:
   2082   case ARM::BI__builtin_neon_vst1q_lane_v: {
   2083     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   2084     Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]);
   2085     Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
   2086     StoreInst *St = Builder.CreateStore(Ops[1],
   2087                                         Builder.CreateBitCast(Ops[0], Ty));
   2088     Value *Align = GetPointeeAlignmentValue(E->getArg(0));
   2089     St->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
   2090     return St;
   2091   }
   2092   case ARM::BI__builtin_neon_vst2_v:
   2093   case ARM::BI__builtin_neon_vst2q_v:
   2094     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   2095     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst2, Ty),
   2096                         Ops, "");
   2097   case ARM::BI__builtin_neon_vst2_lane_v:
   2098   case ARM::BI__builtin_neon_vst2q_lane_v:
   2099     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   2100     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst2lane, Ty),
   2101                         Ops, "");
   2102   case ARM::BI__builtin_neon_vst3_v:
   2103   case ARM::BI__builtin_neon_vst3q_v:
   2104     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   2105     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst3, Ty),
   2106                         Ops, "");
   2107   case ARM::BI__builtin_neon_vst3_lane_v:
   2108   case ARM::BI__builtin_neon_vst3q_lane_v:
   2109     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   2110     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst3lane, Ty),
   2111                         Ops, "");
   2112   case ARM::BI__builtin_neon_vst4_v:
   2113   case ARM::BI__builtin_neon_vst4q_v:
   2114     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   2115     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst4, Ty),
   2116                         Ops, "");
   2117   case ARM::BI__builtin_neon_vst4_lane_v:
   2118   case ARM::BI__builtin_neon_vst4q_lane_v:
   2119     Ops.push_back(GetPointeeAlignmentValue(E->getArg(0)));
   2120     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst4lane, Ty),
   2121                         Ops, "");
   2122   case ARM::BI__builtin_neon_vsubhn_v:
   2123     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vsubhn, Ty),
   2124                         Ops, "vsubhn");
   2125   case ARM::BI__builtin_neon_vtbl1_v:
   2126     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl1),
   2127                         Ops, "vtbl1");
   2128   case ARM::BI__builtin_neon_vtbl2_v:
   2129     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl2),
   2130                         Ops, "vtbl2");
   2131   case ARM::BI__builtin_neon_vtbl3_v:
   2132     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl3),
   2133                         Ops, "vtbl3");
   2134   case ARM::BI__builtin_neon_vtbl4_v:
   2135     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl4),
   2136                         Ops, "vtbl4");
   2137   case ARM::BI__builtin_neon_vtbx1_v:
   2138     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx1),
   2139                         Ops, "vtbx1");
   2140   case ARM::BI__builtin_neon_vtbx2_v:
   2141     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx2),
   2142                         Ops, "vtbx2");
   2143   case ARM::BI__builtin_neon_vtbx3_v:
   2144     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx3),
   2145                         Ops, "vtbx3");
   2146   case ARM::BI__builtin_neon_vtbx4_v:
   2147     return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx4),
   2148                         Ops, "vtbx4");
   2149   case ARM::BI__builtin_neon_vtst_v:
   2150   case ARM::BI__builtin_neon_vtstq_v: {
   2151     Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
   2152     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   2153     Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]);
   2154     Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0],
   2155                                 ConstantAggregateZero::get(Ty));
   2156     return Builder.CreateSExt(Ops[0], Ty, "vtst");
   2157   }
   2158   case ARM::BI__builtin_neon_vtrn_v:
   2159   case ARM::BI__builtin_neon_vtrnq_v: {
   2160     Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
   2161     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   2162     Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
   2163     Value *SV = 0;
   2164 
   2165     for (unsigned vi = 0; vi != 2; ++vi) {
   2166       SmallVector<Constant*, 16> Indices;
   2167       for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
   2168         Indices.push_back(Builder.getInt32(i+vi));
   2169         Indices.push_back(Builder.getInt32(i+e+vi));
   2170       }
   2171       Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
   2172       SV = llvm::ConstantVector::get(Indices);
   2173       SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vtrn");
   2174       SV = Builder.CreateStore(SV, Addr);
   2175     }
   2176     return SV;
   2177   }
   2178   case ARM::BI__builtin_neon_vuzp_v:
   2179   case ARM::BI__builtin_neon_vuzpq_v: {
   2180     Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
   2181     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   2182     Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
   2183     Value *SV = 0;
   2184 
   2185     for (unsigned vi = 0; vi != 2; ++vi) {
   2186       SmallVector<Constant*, 16> Indices;
   2187       for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
   2188         Indices.push_back(ConstantInt::get(Int32Ty, 2*i+vi));
   2189 
   2190       Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
   2191       SV = llvm::ConstantVector::get(Indices);
   2192       SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vuzp");
   2193       SV = Builder.CreateStore(SV, Addr);
   2194     }
   2195     return SV;
   2196   }
   2197   case ARM::BI__builtin_neon_vzip_v:
   2198   case ARM::BI__builtin_neon_vzipq_v: {
   2199     Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
   2200     Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
   2201     Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
   2202     Value *SV = 0;
   2203 
   2204     for (unsigned vi = 0; vi != 2; ++vi) {
   2205       SmallVector<Constant*, 16> Indices;
   2206       for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
   2207         Indices.push_back(ConstantInt::get(Int32Ty, (i + vi*e) >> 1));
   2208         Indices.push_back(ConstantInt::get(Int32Ty, ((i + vi*e) >> 1)+e));
   2209       }
   2210       Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
   2211       SV = llvm::ConstantVector::get(Indices);
   2212       SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vzip");
   2213       SV = Builder.CreateStore(SV, Addr);
   2214     }
   2215     return SV;
   2216   }
   2217   }
   2218 }
   2219 
   2220 llvm::Value *CodeGenFunction::
   2221 BuildVector(ArrayRef<llvm::Value*> Ops) {
   2222   assert((Ops.size() & (Ops.size() - 1)) == 0 &&
   2223          "Not a power-of-two sized vector!");
   2224   bool AllConstants = true;
   2225   for (unsigned i = 0, e = Ops.size(); i != e && AllConstants; ++i)
   2226     AllConstants &= isa<Constant>(Ops[i]);
   2227 
   2228   // If this is a constant vector, create a ConstantVector.
   2229   if (AllConstants) {
   2230     SmallVector<llvm::Constant*, 16> CstOps;
   2231     for (unsigned i = 0, e = Ops.size(); i != e; ++i)
   2232       CstOps.push_back(cast<Constant>(Ops[i]));
   2233     return llvm::ConstantVector::get(CstOps);
   2234   }
   2235 
   2236   // Otherwise, insertelement the values to build the vector.
   2237   Value *Result =
   2238     llvm::UndefValue::get(llvm::VectorType::get(Ops[0]->getType(), Ops.size()));
   2239 
   2240   for (unsigned i = 0, e = Ops.size(); i != e; ++i)
   2241     Result = Builder.CreateInsertElement(Result, Ops[i], Builder.getInt32(i));
   2242 
   2243   return Result;
   2244 }
   2245 
   2246 Value *CodeGenFunction::EmitX86BuiltinExpr(unsigned BuiltinID,
   2247                                            const CallExpr *E) {
   2248   SmallVector<Value*, 4> Ops;
   2249 
   2250   // Find out if any arguments are required to be integer constant expressions.
   2251   unsigned ICEArguments = 0;
   2252   ASTContext::GetBuiltinTypeError Error;
   2253   getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
   2254   assert(Error == ASTContext::GE_None && "Should not codegen an error");
   2255 
   2256   for (unsigned i = 0, e = E->getNumArgs(); i != e; i++) {
   2257     // If this is a normal argument, just emit it as a scalar.
   2258     if ((ICEArguments & (1 << i)) == 0) {
   2259       Ops.push_back(EmitScalarExpr(E->getArg(i)));
   2260       continue;
   2261     }
   2262 
   2263     // If this is required to be a constant, constant fold it so that we know
   2264     // that the generated intrinsic gets a ConstantInt.
   2265     llvm::APSInt Result;
   2266     bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
   2267     assert(IsConst && "Constant arg isn't actually constant?"); (void)IsConst;
   2268     Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
   2269   }
   2270 
   2271   switch (BuiltinID) {
   2272   default: return 0;
   2273   case X86::BI__builtin_ia32_vec_init_v8qi:
   2274   case X86::BI__builtin_ia32_vec_init_v4hi:
   2275   case X86::BI__builtin_ia32_vec_init_v2si:
   2276     return Builder.CreateBitCast(BuildVector(Ops),
   2277                                  llvm::Type::getX86_MMXTy(getLLVMContext()));
   2278   case X86::BI__builtin_ia32_vec_ext_v2si:
   2279     return Builder.CreateExtractElement(Ops[0],
   2280                                   llvm::ConstantInt::get(Ops[1]->getType(), 0));
   2281   case X86::BI__builtin_ia32_ldmxcsr: {
   2282     llvm::Type *PtrTy = Int8PtrTy;
   2283     Value *One = llvm::ConstantInt::get(Int32Ty, 1);
   2284     Value *Tmp = Builder.CreateAlloca(Int32Ty, One);
   2285     Builder.CreateStore(Ops[0], Tmp);
   2286     return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_ldmxcsr),
   2287                               Builder.CreateBitCast(Tmp, PtrTy));
   2288   }
   2289   case X86::BI__builtin_ia32_stmxcsr: {
   2290     llvm::Type *PtrTy = Int8PtrTy;
   2291     Value *One = llvm::ConstantInt::get(Int32Ty, 1);
   2292     Value *Tmp = Builder.CreateAlloca(Int32Ty, One);
   2293     Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_stmxcsr),
   2294                        Builder.CreateBitCast(Tmp, PtrTy));
   2295     return Builder.CreateLoad(Tmp, "stmxcsr");
   2296   }
   2297   case X86::BI__builtin_ia32_storehps:
   2298   case X86::BI__builtin_ia32_storelps: {
   2299     llvm::Type *PtrTy = llvm::PointerType::getUnqual(Int64Ty);
   2300     llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
   2301 
   2302     // cast val v2i64
   2303     Ops[1] = Builder.CreateBitCast(Ops[1], VecTy, "cast");
   2304 
   2305     // extract (0, 1)
   2306     unsigned Index = BuiltinID == X86::BI__builtin_ia32_storelps ? 0 : 1;
   2307     llvm::Value *Idx = llvm::ConstantInt::get(Int32Ty, Index);
   2308     Ops[1] = Builder.CreateExtractElement(Ops[1], Idx, "extract");
   2309 
   2310     // cast pointer to i64 & store
   2311     Ops[0] = Builder.CreateBitCast(Ops[0], PtrTy);
   2312     return Builder.CreateStore(Ops[1], Ops[0]);
   2313   }
   2314   case X86::BI__builtin_ia32_palignr: {
   2315     unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
   2316 
   2317     // If palignr is shifting the pair of input vectors less than 9 bytes,
   2318     // emit a shuffle instruction.
   2319     if (shiftVal <= 8) {
   2320       SmallVector<llvm::Constant*, 8> Indices;
   2321       for (unsigned i = 0; i != 8; ++i)
   2322         Indices.push_back(llvm::ConstantInt::get(Int32Ty, shiftVal + i));
   2323 
   2324       Value* SV = llvm::ConstantVector::get(Indices);
   2325       return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
   2326     }
   2327 
   2328     // If palignr is shifting the pair of input vectors more than 8 but less
   2329     // than 16 bytes, emit a logical right shift of the destination.
   2330     if (shiftVal < 16) {
   2331       // MMX has these as 1 x i64 vectors for some odd optimization reasons.
   2332       llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 1);
   2333 
   2334       Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
   2335       Ops[1] = llvm::ConstantInt::get(VecTy, (shiftVal-8) * 8);
   2336 
   2337       // create i32 constant
   2338       llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_mmx_psrl_q);
   2339       return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
   2340     }
   2341 
   2342     // If palignr is shifting the pair of vectors more than 16 bytes, emit zero.
   2343     return llvm::Constant::getNullValue(ConvertType(E->getType()));
   2344   }
   2345   case X86::BI__builtin_ia32_palignr128: {
   2346     unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
   2347 
   2348     // If palignr is shifting the pair of input vectors less than 17 bytes,
   2349     // emit a shuffle instruction.
   2350     if (shiftVal <= 16) {
   2351       SmallVector<llvm::Constant*, 16> Indices;
   2352       for (unsigned i = 0; i != 16; ++i)
   2353         Indices.push_back(llvm::ConstantInt::get(Int32Ty, shiftVal + i));
   2354 
   2355       Value* SV = llvm::ConstantVector::get(Indices);
   2356       return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
   2357     }
   2358 
   2359     // If palignr is shifting the pair of input vectors more than 16 but less
   2360     // than 32 bytes, emit a logical right shift of the destination.
   2361     if (shiftVal < 32) {
   2362       llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
   2363 
   2364       Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
   2365       Ops[1] = llvm::ConstantInt::get(Int32Ty, (shiftVal-16) * 8);
   2366 
   2367       // create i32 constant
   2368       llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_sse2_psrl_dq);
   2369       return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
   2370     }
   2371 
   2372     // If palignr is shifting the pair of vectors more than 32 bytes, emit zero.
   2373     return llvm::Constant::getNullValue(ConvertType(E->getType()));
   2374   }
   2375   case X86::BI__builtin_ia32_palignr256: {
   2376     unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
   2377 
   2378     // If palignr is shifting the pair of input vectors less than 17 bytes,
   2379     // emit a shuffle instruction.
   2380     if (shiftVal <= 16) {
   2381       SmallVector<llvm::Constant*, 32> Indices;
   2382       // 256-bit palignr operates on 128-bit lanes so we need to handle that
   2383       for (unsigned l = 0; l != 2; ++l) {
   2384         unsigned LaneStart = l * 16;
   2385         unsigned LaneEnd = (l+1) * 16;
   2386         for (unsigned i = 0; i != 16; ++i) {
   2387           unsigned Idx = shiftVal + i + LaneStart;
   2388           if (Idx >= LaneEnd) Idx += 16; // end of lane, switch operand
   2389           Indices.push_back(llvm::ConstantInt::get(Int32Ty, Idx));
   2390         }
   2391       }
   2392 
   2393       Value* SV = llvm::ConstantVector::get(Indices);
   2394       return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
   2395     }
   2396 
   2397     // If palignr is shifting the pair of input vectors more than 16 but less
   2398     // than 32 bytes, emit a logical right shift of the destination.
   2399     if (shiftVal < 32) {
   2400       llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 4);
   2401 
   2402       Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
   2403       Ops[1] = llvm::ConstantInt::get(Int32Ty, (shiftVal-16) * 8);
   2404 
   2405       // create i32 constant
   2406       llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_avx2_psrl_dq);
   2407       return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
   2408     }
   2409 
   2410     // If palignr is shifting the pair of vectors more than 32 bytes, emit zero.
   2411     return llvm::Constant::getNullValue(ConvertType(E->getType()));
   2412   }
   2413   case X86::BI__builtin_ia32_movntps:
   2414   case X86::BI__builtin_ia32_movntpd:
   2415   case X86::BI__builtin_ia32_movntdq:
   2416   case X86::BI__builtin_ia32_movnti: {
   2417     llvm::MDNode *Node = llvm::MDNode::get(getLLVMContext(),
   2418                                            Builder.getInt32(1));
   2419 
   2420     // Convert the type of the pointer to a pointer to the stored type.
   2421     Value *BC = Builder.CreateBitCast(Ops[0],
   2422                                 llvm::PointerType::getUnqual(Ops[1]->getType()),
   2423                                       "cast");
   2424     StoreInst *SI = Builder.CreateStore(Ops[1], BC);
   2425     SI->setMetadata(CGM.getModule().getMDKindID("nontemporal"), Node);
   2426     SI->setAlignment(16);
   2427     return SI;
   2428   }
   2429   // 3DNow!
   2430   case X86::BI__builtin_ia32_pswapdsf:
   2431   case X86::BI__builtin_ia32_pswapdsi: {
   2432     const char *name = 0;
   2433     Intrinsic::ID ID = Intrinsic::not_intrinsic;
   2434     switch(BuiltinID) {
   2435     default: llvm_unreachable("Unsupported intrinsic!");
   2436     case X86::BI__builtin_ia32_pswapdsf:
   2437     case X86::BI__builtin_ia32_pswapdsi:
   2438       name = "pswapd";
   2439       ID = Intrinsic::x86_3dnowa_pswapd;
   2440       break;
   2441     }
   2442     llvm::Type *MMXTy = llvm::Type::getX86_MMXTy(getLLVMContext());
   2443     Ops[0] = Builder.CreateBitCast(Ops[0], MMXTy, "cast");
   2444     llvm::Function *F = CGM.getIntrinsic(ID);
   2445     return Builder.CreateCall(F, Ops, name);
   2446   }
   2447   }
   2448 }
   2449 
   2450 
   2451 Value *CodeGenFunction::EmitHexagonBuiltinExpr(unsigned BuiltinID,
   2452                                              const CallExpr *E) {
   2453   llvm::SmallVector<Value*, 4> Ops;
   2454 
   2455   for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
   2456     Ops.push_back(EmitScalarExpr(E->getArg(i)));
   2457 
   2458   Intrinsic::ID ID = Intrinsic::not_intrinsic;
   2459 
   2460   switch (BuiltinID) {
   2461   default: return 0;
   2462 
   2463   case Hexagon::BI__builtin_HEXAGON_C2_cmpeq:
   2464     ID = Intrinsic::hexagon_C2_cmpeq; break;
   2465 
   2466   case Hexagon::BI__builtin_HEXAGON_C2_cmpgt:
   2467     ID = Intrinsic::hexagon_C2_cmpgt; break;
   2468 
   2469   case Hexagon::BI__builtin_HEXAGON_C2_cmpgtu:
   2470     ID = Intrinsic::hexagon_C2_cmpgtu; break;
   2471 
   2472   case Hexagon::BI__builtin_HEXAGON_C2_cmpeqp:
   2473     ID = Intrinsic::hexagon_C2_cmpeqp; break;
   2474 
   2475   case Hexagon::BI__builtin_HEXAGON_C2_cmpgtp:
   2476     ID = Intrinsic::hexagon_C2_cmpgtp; break;
   2477 
   2478   case Hexagon::BI__builtin_HEXAGON_C2_cmpgtup:
   2479     ID = Intrinsic::hexagon_C2_cmpgtup; break;
   2480 
   2481   case Hexagon::BI__builtin_HEXAGON_C2_bitsset:
   2482     ID = Intrinsic::hexagon_C2_bitsset; break;
   2483 
   2484   case Hexagon::BI__builtin_HEXAGON_C2_bitsclr:
   2485     ID = Intrinsic::hexagon_C2_bitsclr; break;
   2486 
   2487   case Hexagon::BI__builtin_HEXAGON_C2_cmpeqi:
   2488     ID = Intrinsic::hexagon_C2_cmpeqi; break;
   2489 
   2490   case Hexagon::BI__builtin_HEXAGON_C2_cmpgti:
   2491     ID = Intrinsic::hexagon_C2_cmpgti; break;
   2492 
   2493   case Hexagon::BI__builtin_HEXAGON_C2_cmpgtui:
   2494     ID = Intrinsic::hexagon_C2_cmpgtui; break;
   2495 
   2496   case Hexagon::BI__builtin_HEXAGON_C2_cmpgei:
   2497     ID = Intrinsic::hexagon_C2_cmpgei; break;
   2498 
   2499   case Hexagon::BI__builtin_HEXAGON_C2_cmpgeui:
   2500     ID = Intrinsic::hexagon_C2_cmpgeui; break;
   2501 
   2502   case Hexagon::BI__builtin_HEXAGON_C2_cmplt:
   2503     ID = Intrinsic::hexagon_C2_cmplt; break;
   2504 
   2505   case Hexagon::BI__builtin_HEXAGON_C2_cmpltu:
   2506     ID = Intrinsic::hexagon_C2_cmpltu; break;
   2507 
   2508   case Hexagon::BI__builtin_HEXAGON_C2_bitsclri:
   2509     ID = Intrinsic::hexagon_C2_bitsclri; break;
   2510 
   2511   case Hexagon::BI__builtin_HEXAGON_C2_and:
   2512     ID = Intrinsic::hexagon_C2_and; break;
   2513 
   2514   case Hexagon::BI__builtin_HEXAGON_C2_or:
   2515     ID = Intrinsic::hexagon_C2_or; break;
   2516 
   2517   case Hexagon::BI__builtin_HEXAGON_C2_xor:
   2518     ID = Intrinsic::hexagon_C2_xor; break;
   2519 
   2520   case Hexagon::BI__builtin_HEXAGON_C2_andn:
   2521     ID = Intrinsic::hexagon_C2_andn; break;
   2522 
   2523   case Hexagon::BI__builtin_HEXAGON_C2_not:
   2524     ID = Intrinsic::hexagon_C2_not; break;
   2525 
   2526   case Hexagon::BI__builtin_HEXAGON_C2_orn:
   2527     ID = Intrinsic::hexagon_C2_orn; break;
   2528 
   2529   case Hexagon::BI__builtin_HEXAGON_C2_pxfer_map:
   2530     ID = Intrinsic::hexagon_C2_pxfer_map; break;
   2531 
   2532   case Hexagon::BI__builtin_HEXAGON_C2_any8:
   2533     ID = Intrinsic::hexagon_C2_any8; break;
   2534 
   2535   case Hexagon::BI__builtin_HEXAGON_C2_all8:
   2536     ID = Intrinsic::hexagon_C2_all8; break;
   2537 
   2538   case Hexagon::BI__builtin_HEXAGON_C2_vitpack:
   2539     ID = Intrinsic::hexagon_C2_vitpack; break;
   2540 
   2541   case Hexagon::BI__builtin_HEXAGON_C2_mux:
   2542     ID = Intrinsic::hexagon_C2_mux; break;
   2543 
   2544   case Hexagon::BI__builtin_HEXAGON_C2_muxii:
   2545     ID = Intrinsic::hexagon_C2_muxii; break;
   2546 
   2547   case Hexagon::BI__builtin_HEXAGON_C2_muxir:
   2548     ID = Intrinsic::hexagon_C2_muxir; break;
   2549 
   2550   case Hexagon::BI__builtin_HEXAGON_C2_muxri:
   2551     ID = Intrinsic::hexagon_C2_muxri; break;
   2552 
   2553   case Hexagon::BI__builtin_HEXAGON_C2_vmux:
   2554     ID = Intrinsic::hexagon_C2_vmux; break;
   2555 
   2556   case Hexagon::BI__builtin_HEXAGON_C2_mask:
   2557     ID = Intrinsic::hexagon_C2_mask; break;
   2558 
   2559   case Hexagon::BI__builtin_HEXAGON_A2_vcmpbeq:
   2560     ID = Intrinsic::hexagon_A2_vcmpbeq; break;
   2561 
   2562   case Hexagon::BI__builtin_HEXAGON_A2_vcmpbgtu:
   2563     ID = Intrinsic::hexagon_A2_vcmpbgtu; break;
   2564 
   2565   case Hexagon::BI__builtin_HEXAGON_A2_vcmpheq:
   2566     ID = Intrinsic::hexagon_A2_vcmpheq; break;
   2567 
   2568   case Hexagon::BI__builtin_HEXAGON_A2_vcmphgt:
   2569     ID = Intrinsic::hexagon_A2_vcmphgt; break;
   2570 
   2571   case Hexagon::BI__builtin_HEXAGON_A2_vcmphgtu:
   2572     ID = Intrinsic::hexagon_A2_vcmphgtu; break;
   2573 
   2574   case Hexagon::BI__builtin_HEXAGON_A2_vcmpweq:
   2575     ID = Intrinsic::hexagon_A2_vcmpweq; break;
   2576 
   2577   case Hexagon::BI__builtin_HEXAGON_A2_vcmpwgt:
   2578     ID = Intrinsic::hexagon_A2_vcmpwgt; break;
   2579 
   2580   case Hexagon::BI__builtin_HEXAGON_A2_vcmpwgtu:
   2581     ID = Intrinsic::hexagon_A2_vcmpwgtu; break;
   2582 
   2583   case Hexagon::BI__builtin_HEXAGON_C2_tfrpr:
   2584     ID = Intrinsic::hexagon_C2_tfrpr; break;
   2585 
   2586   case Hexagon::BI__builtin_HEXAGON_C2_tfrrp:
   2587     ID = Intrinsic::hexagon_C2_tfrrp; break;
   2588 
   2589   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hh_s0:
   2590     ID = Intrinsic::hexagon_M2_mpy_acc_hh_s0; break;
   2591 
   2592   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hh_s1:
   2593     ID = Intrinsic::hexagon_M2_mpy_acc_hh_s1; break;
   2594 
   2595   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hl_s0:
   2596     ID = Intrinsic::hexagon_M2_mpy_acc_hl_s0; break;
   2597 
   2598   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hl_s1:
   2599     ID = Intrinsic::hexagon_M2_mpy_acc_hl_s1; break;
   2600 
   2601   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_lh_s0:
   2602     ID = Intrinsic::hexagon_M2_mpy_acc_lh_s0; break;
   2603 
   2604   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_lh_s1:
   2605     ID = Intrinsic::hexagon_M2_mpy_acc_lh_s1; break;
   2606 
   2607   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_ll_s0:
   2608     ID = Intrinsic::hexagon_M2_mpy_acc_ll_s0; break;
   2609 
   2610   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_ll_s1:
   2611     ID = Intrinsic::hexagon_M2_mpy_acc_ll_s1; break;
   2612 
   2613   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hh_s0:
   2614     ID = Intrinsic::hexagon_M2_mpy_nac_hh_s0; break;
   2615 
   2616   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hh_s1:
   2617     ID = Intrinsic::hexagon_M2_mpy_nac_hh_s1; break;
   2618 
   2619   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hl_s0:
   2620     ID = Intrinsic::hexagon_M2_mpy_nac_hl_s0; break;
   2621 
   2622   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hl_s1:
   2623     ID = Intrinsic::hexagon_M2_mpy_nac_hl_s1; break;
   2624 
   2625   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_lh_s0:
   2626     ID = Intrinsic::hexagon_M2_mpy_nac_lh_s0; break;
   2627 
   2628   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_lh_s1:
   2629     ID = Intrinsic::hexagon_M2_mpy_nac_lh_s1; break;
   2630 
   2631   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_ll_s0:
   2632     ID = Intrinsic::hexagon_M2_mpy_nac_ll_s0; break;
   2633 
   2634   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_ll_s1:
   2635     ID = Intrinsic::hexagon_M2_mpy_nac_ll_s1; break;
   2636 
   2637   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hh_s0:
   2638     ID = Intrinsic::hexagon_M2_mpy_acc_sat_hh_s0; break;
   2639 
   2640   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hh_s1:
   2641     ID = Intrinsic::hexagon_M2_mpy_acc_sat_hh_s1; break;
   2642 
   2643   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hl_s0:
   2644     ID = Intrinsic::hexagon_M2_mpy_acc_sat_hl_s0; break;
   2645 
   2646   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hl_s1:
   2647     ID = Intrinsic::hexagon_M2_mpy_acc_sat_hl_s1; break;
   2648 
   2649   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_lh_s0:
   2650     ID = Intrinsic::hexagon_M2_mpy_acc_sat_lh_s0; break;
   2651 
   2652   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_lh_s1:
   2653     ID = Intrinsic::hexagon_M2_mpy_acc_sat_lh_s1; break;
   2654 
   2655   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_ll_s0:
   2656     ID = Intrinsic::hexagon_M2_mpy_acc_sat_ll_s0; break;
   2657 
   2658   case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_ll_s1:
   2659     ID = Intrinsic::hexagon_M2_mpy_acc_sat_ll_s1; break;
   2660 
   2661   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hh_s0:
   2662     ID = Intrinsic::hexagon_M2_mpy_nac_sat_hh_s0; break;
   2663 
   2664   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hh_s1:
   2665     ID = Intrinsic::hexagon_M2_mpy_nac_sat_hh_s1; break;
   2666 
   2667   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hl_s0:
   2668     ID = Intrinsic::hexagon_M2_mpy_nac_sat_hl_s0; break;
   2669 
   2670   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hl_s1:
   2671     ID = Intrinsic::hexagon_M2_mpy_nac_sat_hl_s1; break;
   2672 
   2673   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_lh_s0:
   2674     ID = Intrinsic::hexagon_M2_mpy_nac_sat_lh_s0; break;
   2675 
   2676   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_lh_s1:
   2677     ID = Intrinsic::hexagon_M2_mpy_nac_sat_lh_s1; break;
   2678 
   2679   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_ll_s0:
   2680     ID = Intrinsic::hexagon_M2_mpy_nac_sat_ll_s0; break;
   2681 
   2682   case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_ll_s1:
   2683     ID = Intrinsic::hexagon_M2_mpy_nac_sat_ll_s1; break;
   2684 
   2685   case Hexagon::BI__builtin_HEXAGON_M2_mpy_hh_s0:
   2686     ID = Intrinsic::hexagon_M2_mpy_hh_s0; break;
   2687 
   2688   case Hexagon::BI__builtin_HEXAGON_M2_mpy_hh_s1:
   2689     ID = Intrinsic::hexagon_M2_mpy_hh_s1; break;
   2690 
   2691   case Hexagon::BI__builtin_HEXAGON_M2_mpy_hl_s0:
   2692     ID = Intrinsic::hexagon_M2_mpy_hl_s0; break;
   2693 
   2694   case Hexagon::BI__builtin_HEXAGON_M2_mpy_hl_s1:
   2695     ID = Intrinsic::hexagon_M2_mpy_hl_s1; break;
   2696 
   2697   case Hexagon::BI__builtin_HEXAGON_M2_mpy_lh_s0:
   2698     ID = Intrinsic::hexagon_M2_mpy_lh_s0; break;
   2699 
   2700   case Hexagon::BI__builtin_HEXAGON_M2_mpy_lh_s1:
   2701     ID = Intrinsic::hexagon_M2_mpy_lh_s1; break;
   2702 
   2703   case Hexagon::BI__builtin_HEXAGON_M2_mpy_ll_s0:
   2704     ID = Intrinsic::hexagon_M2_mpy_ll_s0; break;
   2705 
   2706   case Hexagon::BI__builtin_HEXAGON_M2_mpy_ll_s1:
   2707     ID = Intrinsic::hexagon_M2_mpy_ll_s1; break;
   2708 
   2709   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hh_s0:
   2710     ID = Intrinsic::hexagon_M2_mpy_sat_hh_s0; break;
   2711 
   2712   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hh_s1:
   2713     ID = Intrinsic::hexagon_M2_mpy_sat_hh_s1; break;
   2714 
   2715   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hl_s0:
   2716     ID = Intrinsic::hexagon_M2_mpy_sat_hl_s0; break;
   2717 
   2718   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hl_s1:
   2719     ID = Intrinsic::hexagon_M2_mpy_sat_hl_s1; break;
   2720 
   2721   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_lh_s0:
   2722     ID = Intrinsic::hexagon_M2_mpy_sat_lh_s0; break;
   2723 
   2724   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_lh_s1:
   2725     ID = Intrinsic::hexagon_M2_mpy_sat_lh_s1; break;
   2726 
   2727   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_ll_s0:
   2728     ID = Intrinsic::hexagon_M2_mpy_sat_ll_s0; break;
   2729 
   2730   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_ll_s1:
   2731     ID = Intrinsic::hexagon_M2_mpy_sat_ll_s1; break;
   2732 
   2733   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hh_s0:
   2734     ID = Intrinsic::hexagon_M2_mpy_rnd_hh_s0; break;
   2735 
   2736   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hh_s1:
   2737     ID = Intrinsic::hexagon_M2_mpy_rnd_hh_s1; break;
   2738 
   2739   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hl_s0:
   2740     ID = Intrinsic::hexagon_M2_mpy_rnd_hl_s0; break;
   2741 
   2742   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hl_s1:
   2743     ID = Intrinsic::hexagon_M2_mpy_rnd_hl_s1; break;
   2744 
   2745   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_lh_s0:
   2746     ID = Intrinsic::hexagon_M2_mpy_rnd_lh_s0; break;
   2747 
   2748   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_lh_s1:
   2749     ID = Intrinsic::hexagon_M2_mpy_rnd_lh_s1; break;
   2750 
   2751   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_ll_s0:
   2752     ID = Intrinsic::hexagon_M2_mpy_rnd_ll_s0; break;
   2753 
   2754   case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_ll_s1:
   2755     ID = Intrinsic::hexagon_M2_mpy_rnd_ll_s1; break;
   2756 
   2757   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s0:
   2758     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hh_s0; break;
   2759 
   2760   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s1:
   2761     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hh_s1; break;
   2762 
   2763   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s0:
   2764     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hl_s0; break;
   2765 
   2766   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s1:
   2767     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hl_s1; break;
   2768 
   2769   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s0:
   2770     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_lh_s0; break;
   2771 
   2772   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s1:
   2773     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_lh_s1; break;
   2774 
   2775   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s0:
   2776     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_ll_s0; break;
   2777 
   2778   case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s1:
   2779     ID = Intrinsic::hexagon_M2_mpy_sat_rnd_ll_s1; break;
   2780 
   2781   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hh_s0:
   2782     ID = Intrinsic::hexagon_M2_mpyd_acc_hh_s0; break;
   2783 
   2784   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hh_s1:
   2785     ID = Intrinsic::hexagon_M2_mpyd_acc_hh_s1; break;
   2786 
   2787   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hl_s0:
   2788     ID = Intrinsic::hexagon_M2_mpyd_acc_hl_s0; break;
   2789 
   2790   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hl_s1:
   2791     ID = Intrinsic::hexagon_M2_mpyd_acc_hl_s1; break;
   2792 
   2793   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_lh_s0:
   2794     ID = Intrinsic::hexagon_M2_mpyd_acc_lh_s0; break;
   2795 
   2796   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_lh_s1:
   2797     ID = Intrinsic::hexagon_M2_mpyd_acc_lh_s1; break;
   2798 
   2799   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_ll_s0:
   2800     ID = Intrinsic::hexagon_M2_mpyd_acc_ll_s0; break;
   2801 
   2802   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_ll_s1:
   2803     ID = Intrinsic::hexagon_M2_mpyd_acc_ll_s1; break;
   2804 
   2805   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hh_s0:
   2806     ID = Intrinsic::hexagon_M2_mpyd_nac_hh_s0; break;
   2807 
   2808   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hh_s1:
   2809     ID = Intrinsic::hexagon_M2_mpyd_nac_hh_s1; break;
   2810 
   2811   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hl_s0:
   2812     ID = Intrinsic::hexagon_M2_mpyd_nac_hl_s0; break;
   2813 
   2814   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hl_s1:
   2815     ID = Intrinsic::hexagon_M2_mpyd_nac_hl_s1; break;
   2816 
   2817   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_lh_s0:
   2818     ID = Intrinsic::hexagon_M2_mpyd_nac_lh_s0; break;
   2819 
   2820   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_lh_s1:
   2821     ID = Intrinsic::hexagon_M2_mpyd_nac_lh_s1; break;
   2822 
   2823   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_ll_s0:
   2824     ID = Intrinsic::hexagon_M2_mpyd_nac_ll_s0; break;
   2825 
   2826   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_ll_s1:
   2827     ID = Intrinsic::hexagon_M2_mpyd_nac_ll_s1; break;
   2828 
   2829   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hh_s0:
   2830     ID = Intrinsic::hexagon_M2_mpyd_hh_s0; break;
   2831 
   2832   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hh_s1:
   2833     ID = Intrinsic::hexagon_M2_mpyd_hh_s1; break;
   2834 
   2835   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hl_s0:
   2836     ID = Intrinsic::hexagon_M2_mpyd_hl_s0; break;
   2837 
   2838   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hl_s1:
   2839     ID = Intrinsic::hexagon_M2_mpyd_hl_s1; break;
   2840 
   2841   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_lh_s0:
   2842     ID = Intrinsic::hexagon_M2_mpyd_lh_s0; break;
   2843 
   2844   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_lh_s1:
   2845     ID = Intrinsic::hexagon_M2_mpyd_lh_s1; break;
   2846 
   2847   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_ll_s0:
   2848     ID = Intrinsic::hexagon_M2_mpyd_ll_s0; break;
   2849 
   2850   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_ll_s1:
   2851     ID = Intrinsic::hexagon_M2_mpyd_ll_s1; break;
   2852 
   2853   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hh_s0:
   2854     ID = Intrinsic::hexagon_M2_mpyd_rnd_hh_s0; break;
   2855 
   2856   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hh_s1:
   2857     ID = Intrinsic::hexagon_M2_mpyd_rnd_hh_s1; break;
   2858 
   2859   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hl_s0:
   2860     ID = Intrinsic::hexagon_M2_mpyd_rnd_hl_s0; break;
   2861 
   2862   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hl_s1:
   2863     ID = Intrinsic::hexagon_M2_mpyd_rnd_hl_s1; break;
   2864 
   2865   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_lh_s0:
   2866     ID = Intrinsic::hexagon_M2_mpyd_rnd_lh_s0; break;
   2867 
   2868   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_lh_s1:
   2869     ID = Intrinsic::hexagon_M2_mpyd_rnd_lh_s1; break;
   2870 
   2871   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_ll_s0:
   2872     ID = Intrinsic::hexagon_M2_mpyd_rnd_ll_s0; break;
   2873 
   2874   case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_ll_s1:
   2875     ID = Intrinsic::hexagon_M2_mpyd_rnd_ll_s1; break;
   2876 
   2877   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hh_s0:
   2878     ID = Intrinsic::hexagon_M2_mpyu_acc_hh_s0; break;
   2879 
   2880   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hh_s1:
   2881     ID = Intrinsic::hexagon_M2_mpyu_acc_hh_s1; break;
   2882 
   2883   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hl_s0:
   2884     ID = Intrinsic::hexagon_M2_mpyu_acc_hl_s0; break;
   2885 
   2886   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hl_s1:
   2887     ID = Intrinsic::hexagon_M2_mpyu_acc_hl_s1; break;
   2888 
   2889   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_lh_s0:
   2890     ID = Intrinsic::hexagon_M2_mpyu_acc_lh_s0; break;
   2891 
   2892   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_lh_s1:
   2893     ID = Intrinsic::hexagon_M2_mpyu_acc_lh_s1; break;
   2894 
   2895   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_ll_s0:
   2896     ID = Intrinsic::hexagon_M2_mpyu_acc_ll_s0; break;
   2897 
   2898   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_ll_s1:
   2899     ID = Intrinsic::hexagon_M2_mpyu_acc_ll_s1; break;
   2900 
   2901   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hh_s0:
   2902     ID = Intrinsic::hexagon_M2_mpyu_nac_hh_s0; break;
   2903 
   2904   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hh_s1:
   2905     ID = Intrinsic::hexagon_M2_mpyu_nac_hh_s1; break;
   2906 
   2907   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hl_s0:
   2908     ID = Intrinsic::hexagon_M2_mpyu_nac_hl_s0; break;
   2909 
   2910   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hl_s1:
   2911     ID = Intrinsic::hexagon_M2_mpyu_nac_hl_s1; break;
   2912 
   2913   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_lh_s0:
   2914     ID = Intrinsic::hexagon_M2_mpyu_nac_lh_s0; break;
   2915 
   2916   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_lh_s1:
   2917     ID = Intrinsic::hexagon_M2_mpyu_nac_lh_s1; break;
   2918 
   2919   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_ll_s0:
   2920     ID = Intrinsic::hexagon_M2_mpyu_nac_ll_s0; break;
   2921 
   2922   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_ll_s1:
   2923     ID = Intrinsic::hexagon_M2_mpyu_nac_ll_s1; break;
   2924 
   2925   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hh_s0:
   2926     ID = Intrinsic::hexagon_M2_mpyu_hh_s0; break;
   2927 
   2928   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hh_s1:
   2929     ID = Intrinsic::hexagon_M2_mpyu_hh_s1; break;
   2930 
   2931   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hl_s0:
   2932     ID = Intrinsic::hexagon_M2_mpyu_hl_s0; break;
   2933 
   2934   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hl_s1:
   2935     ID = Intrinsic::hexagon_M2_mpyu_hl_s1; break;
   2936 
   2937   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_lh_s0:
   2938     ID = Intrinsic::hexagon_M2_mpyu_lh_s0; break;
   2939 
   2940   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_lh_s1:
   2941     ID = Intrinsic::hexagon_M2_mpyu_lh_s1; break;
   2942 
   2943   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_ll_s0:
   2944     ID = Intrinsic::hexagon_M2_mpyu_ll_s0; break;
   2945 
   2946   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_ll_s1:
   2947     ID = Intrinsic::hexagon_M2_mpyu_ll_s1; break;
   2948 
   2949   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hh_s0:
   2950     ID = Intrinsic::hexagon_M2_mpyud_acc_hh_s0; break;
   2951 
   2952   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hh_s1:
   2953     ID = Intrinsic::hexagon_M2_mpyud_acc_hh_s1; break;
   2954 
   2955   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hl_s0:
   2956     ID = Intrinsic::hexagon_M2_mpyud_acc_hl_s0; break;
   2957 
   2958   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hl_s1:
   2959     ID = Intrinsic::hexagon_M2_mpyud_acc_hl_s1; break;
   2960 
   2961   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_lh_s0:
   2962     ID = Intrinsic::hexagon_M2_mpyud_acc_lh_s0; break;
   2963 
   2964   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_lh_s1:
   2965     ID = Intrinsic::hexagon_M2_mpyud_acc_lh_s1; break;
   2966 
   2967   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_ll_s0:
   2968     ID = Intrinsic::hexagon_M2_mpyud_acc_ll_s0; break;
   2969 
   2970   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_ll_s1:
   2971     ID = Intrinsic::hexagon_M2_mpyud_acc_ll_s1; break;
   2972 
   2973   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hh_s0:
   2974     ID = Intrinsic::hexagon_M2_mpyud_nac_hh_s0; break;
   2975 
   2976   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hh_s1:
   2977     ID = Intrinsic::hexagon_M2_mpyud_nac_hh_s1; break;
   2978 
   2979   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hl_s0:
   2980     ID = Intrinsic::hexagon_M2_mpyud_nac_hl_s0; break;
   2981 
   2982   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hl_s1:
   2983     ID = Intrinsic::hexagon_M2_mpyud_nac_hl_s1; break;
   2984 
   2985   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_lh_s0:
   2986     ID = Intrinsic::hexagon_M2_mpyud_nac_lh_s0; break;
   2987 
   2988   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_lh_s1:
   2989     ID = Intrinsic::hexagon_M2_mpyud_nac_lh_s1; break;
   2990 
   2991   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_ll_s0:
   2992     ID = Intrinsic::hexagon_M2_mpyud_nac_ll_s0; break;
   2993 
   2994   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_ll_s1:
   2995     ID = Intrinsic::hexagon_M2_mpyud_nac_ll_s1; break;
   2996 
   2997   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hh_s0:
   2998     ID = Intrinsic::hexagon_M2_mpyud_hh_s0; break;
   2999 
   3000   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hh_s1:
   3001     ID = Intrinsic::hexagon_M2_mpyud_hh_s1; break;
   3002 
   3003   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hl_s0:
   3004     ID = Intrinsic::hexagon_M2_mpyud_hl_s0; break;
   3005 
   3006   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hl_s1:
   3007     ID = Intrinsic::hexagon_M2_mpyud_hl_s1; break;
   3008 
   3009   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_lh_s0:
   3010     ID = Intrinsic::hexagon_M2_mpyud_lh_s0; break;
   3011 
   3012   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_lh_s1:
   3013     ID = Intrinsic::hexagon_M2_mpyud_lh_s1; break;
   3014 
   3015   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_ll_s0:
   3016     ID = Intrinsic::hexagon_M2_mpyud_ll_s0; break;
   3017 
   3018   case Hexagon::BI__builtin_HEXAGON_M2_mpyud_ll_s1:
   3019     ID = Intrinsic::hexagon_M2_mpyud_ll_s1; break;
   3020 
   3021   case Hexagon::BI__builtin_HEXAGON_M2_mpysmi:
   3022     ID = Intrinsic::hexagon_M2_mpysmi; break;
   3023 
   3024   case Hexagon::BI__builtin_HEXAGON_M2_macsip:
   3025     ID = Intrinsic::hexagon_M2_macsip; break;
   3026 
   3027   case Hexagon::BI__builtin_HEXAGON_M2_macsin:
   3028     ID = Intrinsic::hexagon_M2_macsin; break;
   3029 
   3030   case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_s0:
   3031     ID = Intrinsic::hexagon_M2_dpmpyss_s0; break;
   3032 
   3033   case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_acc_s0:
   3034     ID = Intrinsic::hexagon_M2_dpmpyss_acc_s0; break;
   3035 
   3036   case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_nac_s0:
   3037     ID = Intrinsic::hexagon_M2_dpmpyss_nac_s0; break;
   3038 
   3039   case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_s0:
   3040     ID = Intrinsic::hexagon_M2_dpmpyuu_s0; break;
   3041 
   3042   case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_acc_s0:
   3043     ID = Intrinsic::hexagon_M2_dpmpyuu_acc_s0; break;
   3044 
   3045   case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_nac_s0:
   3046     ID = Intrinsic::hexagon_M2_dpmpyuu_nac_s0; break;
   3047 
   3048   case Hexagon::BI__builtin_HEXAGON_M2_mpy_up:
   3049     ID = Intrinsic::hexagon_M2_mpy_up; break;
   3050 
   3051   case Hexagon::BI__builtin_HEXAGON_M2_mpyu_up:
   3052     ID = Intrinsic::hexagon_M2_mpyu_up; break;
   3053 
   3054   case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_rnd_s0:
   3055     ID = Intrinsic::hexagon_M2_dpmpyss_rnd_s0; break;
   3056 
   3057   case Hexagon::BI__builtin_HEXAGON_M2_mpyi:
   3058     ID = Intrinsic::hexagon_M2_mpyi; break;
   3059 
   3060   case Hexagon::BI__builtin_HEXAGON_M2_mpyui:
   3061     ID = Intrinsic::hexagon_M2_mpyui; break;
   3062 
   3063   case Hexagon::BI__builtin_HEXAGON_M2_maci:
   3064     ID = Intrinsic::hexagon_M2_maci; break;
   3065 
   3066   case Hexagon::BI__builtin_HEXAGON_M2_acci:
   3067     ID = Intrinsic::hexagon_M2_acci; break;
   3068 
   3069   case Hexagon::BI__builtin_HEXAGON_M2_accii:
   3070     ID = Intrinsic::hexagon_M2_accii; break;
   3071 
   3072   case Hexagon::BI__builtin_HEXAGON_M2_nacci:
   3073     ID = Intrinsic::hexagon_M2_nacci; break;
   3074 
   3075   case Hexagon::BI__builtin_HEXAGON_M2_naccii:
   3076     ID = Intrinsic::hexagon_M2_naccii; break;
   3077 
   3078   case Hexagon::BI__builtin_HEXAGON_M2_subacc:
   3079     ID = Intrinsic::hexagon_M2_subacc; break;
   3080 
   3081   case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s0:
   3082     ID = Intrinsic::hexagon_M2_vmpy2s_s0; break;
   3083 
   3084   case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s1:
   3085     ID = Intrinsic::hexagon_M2_vmpy2s_s1; break;
   3086 
   3087   case Hexagon::BI__builtin_HEXAGON_M2_vmac2s_s0:
   3088     ID = Intrinsic::hexagon_M2_vmac2s_s0; break;
   3089 
   3090   case Hexagon::BI__builtin_HEXAGON_M2_vmac2s_s1:
   3091     ID = Intrinsic::hexagon_M2_vmac2s_s1; break;
   3092 
   3093   case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s0pack:
   3094     ID = Intrinsic::hexagon_M2_vmpy2s_s0pack; break;
   3095 
   3096   case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s1pack:
   3097     ID = Intrinsic::hexagon_M2_vmpy2s_s1pack; break;
   3098 
   3099   case Hexagon::BI__builtin_HEXAGON_M2_vmac2:
   3100     ID = Intrinsic::hexagon_M2_vmac2; break;
   3101 
   3102   case Hexagon::BI__builtin_HEXAGON_M2_vmpy2es_s0:
   3103     ID = Intrinsic::hexagon_M2_vmpy2es_s0; break;
   3104 
   3105   case Hexagon::BI__builtin_HEXAGON_M2_vmpy2es_s1:
   3106     ID = Intrinsic::hexagon_M2_vmpy2es_s1; break;
   3107 
   3108   case Hexagon::BI__builtin_HEXAGON_M2_vmac2es_s0:
   3109     ID = Intrinsic::hexagon_M2_vmac2es_s0; break;
   3110 
   3111   case Hexagon::BI__builtin_HEXAGON_M2_vmac2es_s1:
   3112     ID = Intrinsic::hexagon_M2_vmac2es_s1; break;
   3113 
   3114   case Hexagon::BI__builtin_HEXAGON_M2_vmac2es:
   3115     ID = Intrinsic::hexagon_M2_vmac2es; break;
   3116 
   3117   case Hexagon::BI__builtin_HEXAGON_M2_vrmac_s0:
   3118     ID = Intrinsic::hexagon_M2_vrmac_s0; break;
   3119 
   3120   case Hexagon::BI__builtin_HEXAGON_M2_vrmpy_s0:
   3121     ID = Intrinsic::hexagon_M2_vrmpy_s0; break;
   3122 
   3123   case Hexagon::BI__builtin_HEXAGON_M2_vdmpyrs_s0:
   3124     ID = Intrinsic::hexagon_M2_vdmpyrs_s0; break;
   3125 
   3126   case Hexagon::BI__builtin_HEXAGON_M2_vdmpyrs_s1:
   3127     ID = Intrinsic::hexagon_M2_vdmpyrs_s1; break;
   3128 
   3129   case Hexagon::BI__builtin_HEXAGON_M2_vdmacs_s0:
   3130     ID = Intrinsic::hexagon_M2_vdmacs_s0; break;
   3131 
   3132   case Hexagon::BI__builtin_HEXAGON_M2_vdmacs_s1:
   3133     ID = Intrinsic::hexagon_M2_vdmacs_s1; break;
   3134 
   3135   case Hexagon::BI__builtin_HEXAGON_M2_vdmpys_s0:
   3136     ID = Intrinsic::hexagon_M2_vdmpys_s0; break;
   3137 
   3138   case Hexagon::BI__builtin_HEXAGON_M2_vdmpys_s1:
   3139     ID = Intrinsic::hexagon_M2_vdmpys_s1; break;
   3140 
   3141   case Hexagon::BI__builtin_HEXAGON_M2_cmpyrs_s0:
   3142     ID = Intrinsic::hexagon_M2_cmpyrs_s0; break;
   3143 
   3144   case Hexagon::BI__builtin_HEXAGON_M2_cmpyrs_s1:
   3145     ID = Intrinsic::hexagon_M2_cmpyrs_s1; break;
   3146 
   3147   case Hexagon::BI__builtin_HEXAGON_M2_cmpyrsc_s0:
   3148     ID = Intrinsic::hexagon_M2_cmpyrsc_s0; break;
   3149 
   3150   case Hexagon::BI__builtin_HEXAGON_M2_cmpyrsc_s1:
   3151     ID = Intrinsic::hexagon_M2_cmpyrsc_s1; break;
   3152 
   3153   case Hexagon::BI__builtin_HEXAGON_M2_cmacs_s0:
   3154     ID = Intrinsic::hexagon_M2_cmacs_s0; break;
   3155 
   3156   case Hexagon::BI__builtin_HEXAGON_M2_cmacs_s1:
   3157     ID = Intrinsic::hexagon_M2_cmacs_s1; break;
   3158 
   3159   case Hexagon::BI__builtin_HEXAGON_M2_cmacsc_s0:
   3160     ID = Intrinsic::hexagon_M2_cmacsc_s0; break;
   3161 
   3162   case Hexagon::BI__builtin_HEXAGON_M2_cmacsc_s1:
   3163     ID = Intrinsic::hexagon_M2_cmacsc_s1; break;
   3164 
   3165   case Hexagon::BI__builtin_HEXAGON_M2_cmpys_s0:
   3166     ID = Intrinsic::hexagon_M2_cmpys_s0; break;
   3167 
   3168   case Hexagon::BI__builtin_HEXAGON_M2_cmpys_s1:
   3169     ID = Intrinsic::hexagon_M2_cmpys_s1; break;
   3170 
   3171   case Hexagon::BI__builtin_HEXAGON_M2_cmpysc_s0:
   3172     ID = Intrinsic::hexagon_M2_cmpysc_s0; break;
   3173 
   3174   case Hexagon::BI__builtin_HEXAGON_M2_cmpysc_s1:
   3175     ID = Intrinsic::hexagon_M2_cmpysc_s1; break;
   3176 
   3177   case Hexagon::BI__builtin_HEXAGON_M2_cnacs_s0:
   3178     ID = Intrinsic::hexagon_M2_cnacs_s0; break;
   3179 
   3180   case Hexagon::BI__builtin_HEXAGON_M2_cnacs_s1:
   3181     ID = Intrinsic::hexagon_M2_cnacs_s1; break;
   3182 
   3183   case Hexagon::BI__builtin_HEXAGON_M2_cnacsc_s0:
   3184     ID = Intrinsic::hexagon_M2_cnacsc_s0; break;
   3185 
   3186   case Hexagon::BI__builtin_HEXAGON_M2_cnacsc_s1:
   3187     ID = Intrinsic::hexagon_M2_cnacsc_s1; break;
   3188 
   3189   case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_s1:
   3190     ID = Intrinsic::hexagon_M2_vrcmpys_s1; break;
   3191 
   3192   case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_acc_s1:
   3193     ID = Intrinsic::hexagon_M2_vrcmpys_acc_s1; break;
   3194 
   3195   case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_s1rp:
   3196     ID = Intrinsic::hexagon_M2_vrcmpys_s1rp; break;
   3197 
   3198   case Hexagon::BI__builtin_HEXAGON_M2_mmacls_s0:
   3199     ID = Intrinsic::hexagon_M2_mmacls_s0; break;
   3200 
   3201   case Hexagon::BI__builtin_HEXAGON_M2_mmacls_s1:
   3202     ID = Intrinsic::hexagon_M2_mmacls_s1; break;
   3203 
   3204   case Hexagon::BI__builtin_HEXAGON_M2_mmachs_s0:
   3205     ID = Intrinsic::hexagon_M2_mmachs_s0; break;
   3206 
   3207   case Hexagon::BI__builtin_HEXAGON_M2_mmachs_s1:
   3208     ID = Intrinsic::hexagon_M2_mmachs_s1; break;
   3209 
   3210   case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_s0:
   3211     ID = Intrinsic::hexagon_M2_mmpyl_s0; break;
   3212 
   3213   case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_s1:
   3214     ID = Intrinsic::hexagon_M2_mmpyl_s1; break;
   3215 
   3216   case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_s0:
   3217     ID = Intrinsic::hexagon_M2_mmpyh_s0; break;
   3218 
   3219   case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_s1:
   3220     ID = Intrinsic::hexagon_M2_mmpyh_s1; break;
   3221 
   3222   case Hexagon::BI__builtin_HEXAGON_M2_mmacls_rs0:
   3223     ID = Intrinsic::hexagon_M2_mmacls_rs0; break;
   3224 
   3225   case Hexagon::BI__builtin_HEXAGON_M2_mmacls_rs1:
   3226     ID = Intrinsic::hexagon_M2_mmacls_rs1; break;
   3227 
   3228   case Hexagon::BI__builtin_HEXAGON_M2_mmachs_rs0:
   3229     ID = Intrinsic::hexagon_M2_mmachs_rs0; break;
   3230 
   3231   case Hexagon::BI__builtin_HEXAGON_M2_mmachs_rs1:
   3232     ID = Intrinsic::hexagon_M2_mmachs_rs1; break;
   3233 
   3234   case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_rs0:
   3235     ID = Intrinsic::hexagon_M2_mmpyl_rs0; break;
   3236 
   3237   case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_rs1:
   3238     ID = Intrinsic::hexagon_M2_mmpyl_rs1; break;
   3239 
   3240   case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_rs0:
   3241     ID = Intrinsic::hexagon_M2_mmpyh_rs0; break;
   3242 
   3243   case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_rs1:
   3244     ID = Intrinsic::hexagon_M2_mmpyh_rs1; break;
   3245 
   3246   case Hexagon::BI__builtin_HEXAGON_M2_hmmpyl_rs1:
   3247     ID = Intrinsic::hexagon_M2_hmmpyl_rs1; break;
   3248 
   3249   case Hexagon::BI__builtin_HEXAGON_M2_hmmpyh_rs1:
   3250     ID = Intrinsic::hexagon_M2_hmmpyh_rs1; break;
   3251 
   3252   case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_s0:
   3253     ID = Intrinsic::hexagon_M2_mmaculs_s0; break;
   3254 
   3255   case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_s1:
   3256     ID = Intrinsic::hexagon_M2_mmaculs_s1; break;
   3257 
   3258   case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_s0:
   3259     ID = Intrinsic::hexagon_M2_mmacuhs_s0; break;
   3260 
   3261   case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_s1:
   3262     ID = Intrinsic::hexagon_M2_mmacuhs_s1; break;
   3263 
   3264   case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_s0:
   3265     ID = Intrinsic::hexagon_M2_mmpyul_s0; break;
   3266 
   3267   case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_s1:
   3268     ID = Intrinsic::hexagon_M2_mmpyul_s1; break;
   3269 
   3270   case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_s0:
   3271     ID = Intrinsic::hexagon_M2_mmpyuh_s0; break;
   3272 
   3273   case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_s1:
   3274     ID = Intrinsic::hexagon_M2_mmpyuh_s1; break;
   3275 
   3276   case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_rs0:
   3277     ID = Intrinsic::hexagon_M2_mmaculs_rs0; break;
   3278 
   3279   case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_rs1:
   3280     ID = Intrinsic::hexagon_M2_mmaculs_rs1; break;
   3281 
   3282   case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_rs0:
   3283     ID = Intrinsic::hexagon_M2_mmacuhs_rs0; break;
   3284 
   3285   case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_rs1:
   3286     ID = Intrinsic::hexagon_M2_mmacuhs_rs1; break;
   3287 
   3288   case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_rs0:
   3289     ID = Intrinsic::hexagon_M2_mmpyul_rs0; break;
   3290 
   3291   case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_rs1:
   3292     ID = Intrinsic::hexagon_M2_mmpyul_rs1; break;
   3293 
   3294   case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_rs0:
   3295     ID = Intrinsic::hexagon_M2_mmpyuh_rs0; break;
   3296 
   3297   case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_rs1:
   3298     ID = Intrinsic::hexagon_M2_mmpyuh_rs1; break;
   3299 
   3300   case Hexagon::BI__builtin_HEXAGON_M2_vrcmaci_s0:
   3301     ID = Intrinsic::hexagon_M2_vrcmaci_s0; break;
   3302 
   3303   case Hexagon::BI__builtin_HEXAGON_M2_vrcmacr_s0:
   3304     ID = Intrinsic::hexagon_M2_vrcmacr_s0; break;
   3305 
   3306   case Hexagon::BI__builtin_HEXAGON_M2_vrcmaci_s0c:
   3307     ID = Intrinsic::hexagon_M2_vrcmaci_s0c; break;
   3308 
   3309   case Hexagon::BI__builtin_HEXAGON_M2_vrcmacr_s0c:
   3310     ID = Intrinsic::hexagon_M2_vrcmacr_s0c; break;
   3311 
   3312   case Hexagon::BI__builtin_HEXAGON_M2_cmaci_s0:
   3313     ID = Intrinsic::hexagon_M2_cmaci_s0; break;
   3314 
   3315   case Hexagon::BI__builtin_HEXAGON_M2_cmacr_s0:
   3316     ID = Intrinsic::hexagon_M2_cmacr_s0; break;
   3317 
   3318   case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyi_s0:
   3319     ID = Intrinsic::hexagon_M2_vrcmpyi_s0; break;
   3320 
   3321   case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyr_s0:
   3322     ID = Intrinsic::hexagon_M2_vrcmpyr_s0; break;
   3323 
   3324   case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyi_s0c:
   3325     ID = Intrinsic::hexagon_M2_vrcmpyi_s0c; break;
   3326 
   3327   case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyr_s0c:
   3328     ID = Intrinsic::hexagon_M2_vrcmpyr_s0c; break;
   3329 
   3330   case Hexagon::BI__builtin_HEXAGON_M2_cmpyi_s0:
   3331     ID = Intrinsic::hexagon_M2_cmpyi_s0; break;
   3332 
   3333   case Hexagon::BI__builtin_HEXAGON_M2_cmpyr_s0:
   3334     ID = Intrinsic::hexagon_M2_cmpyr_s0; break;
   3335 
   3336   case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s0_sat_i:
   3337     ID = Intrinsic::hexagon_M2_vcmpy_s0_sat_i; break;
   3338 
   3339   case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s0_sat_r:
   3340     ID = Intrinsic::hexagon_M2_vcmpy_s0_sat_r; break;
   3341 
   3342   case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s1_sat_i:
   3343     ID = Intrinsic::hexagon_M2_vcmpy_s1_sat_i; break;
   3344 
   3345   case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s1_sat_r:
   3346     ID = Intrinsic::hexagon_M2_vcmpy_s1_sat_r; break;
   3347 
   3348   case Hexagon::BI__builtin_HEXAGON_M2_vcmac_s0_sat_i:
   3349     ID = Intrinsic::hexagon_M2_vcmac_s0_sat_i; break;
   3350 
   3351   case Hexagon::BI__builtin_HEXAGON_M2_vcmac_s0_sat_r:
   3352     ID = Intrinsic::hexagon_M2_vcmac_s0_sat_r; break;
   3353 
   3354   case Hexagon::BI__builtin_HEXAGON_S2_vcrotate:
   3355     ID = Intrinsic::hexagon_S2_vcrotate; break;
   3356 
   3357   case Hexagon::BI__builtin_HEXAGON_A2_add:
   3358     ID = Intrinsic::hexagon_A2_add; break;
   3359 
   3360   case Hexagon::BI__builtin_HEXAGON_A2_sub:
   3361     ID = Intrinsic::hexagon_A2_sub; break;
   3362 
   3363   case Hexagon::BI__builtin_HEXAGON_A2_addsat:
   3364     ID = Intrinsic::hexagon_A2_addsat; break;
   3365 
   3366   case Hexagon::BI__builtin_HEXAGON_A2_subsat:
   3367     ID = Intrinsic::hexagon_A2_subsat; break;
   3368 
   3369   case Hexagon::BI__builtin_HEXAGON_A2_addi:
   3370     ID = Intrinsic::hexagon_A2_addi; break;
   3371 
   3372   case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_ll:
   3373     ID = Intrinsic::hexagon_A2_addh_l16_ll; break;
   3374 
   3375   case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_hl:
   3376     ID = Intrinsic::hexagon_A2_addh_l16_hl; break;
   3377 
   3378   case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_sat_ll:
   3379     ID = Intrinsic::hexagon_A2_addh_l16_sat_ll; break;
   3380 
   3381   case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_sat_hl:
   3382     ID = Intrinsic::hexagon_A2_addh_l16_sat_hl; break;
   3383 
   3384   case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_ll:
   3385     ID = Intrinsic::hexagon_A2_subh_l16_ll; break;
   3386 
   3387   case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_hl:
   3388     ID = Intrinsic::hexagon_A2_subh_l16_hl; break;
   3389 
   3390   case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_sat_ll:
   3391     ID = Intrinsic::hexagon_A2_subh_l16_sat_ll; break;
   3392 
   3393   case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_sat_hl:
   3394     ID = Intrinsic::hexagon_A2_subh_l16_sat_hl; break;
   3395 
   3396   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_ll:
   3397     ID = Intrinsic::hexagon_A2_addh_h16_ll; break;
   3398 
   3399   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_lh:
   3400     ID = Intrinsic::hexagon_A2_addh_h16_lh; break;
   3401 
   3402   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_hl:
   3403     ID = Intrinsic::hexagon_A2_addh_h16_hl; break;
   3404 
   3405   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_hh:
   3406     ID = Intrinsic::hexagon_A2_addh_h16_hh; break;
   3407 
   3408   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_ll:
   3409     ID = Intrinsic::hexagon_A2_addh_h16_sat_ll; break;
   3410 
   3411   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_lh:
   3412     ID = Intrinsic::hexagon_A2_addh_h16_sat_lh; break;
   3413 
   3414   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_hl:
   3415     ID = Intrinsic::hexagon_A2_addh_h16_sat_hl; break;
   3416 
   3417   case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_hh:
   3418     ID = Intrinsic::hexagon_A2_addh_h16_sat_hh; break;
   3419 
   3420   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_ll:
   3421     ID = Intrinsic::hexagon_A2_subh_h16_ll; break;
   3422 
   3423   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_lh:
   3424     ID = Intrinsic::hexagon_A2_subh_h16_lh; break;
   3425 
   3426   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_hl:
   3427     ID = Intrinsic::hexagon_A2_subh_h16_hl; break;
   3428 
   3429   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_hh:
   3430     ID = Intrinsic::hexagon_A2_subh_h16_hh; break;
   3431 
   3432   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_ll:
   3433     ID = Intrinsic::hexagon_A2_subh_h16_sat_ll; break;
   3434 
   3435   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_lh:
   3436     ID = Intrinsic::hexagon_A2_subh_h16_sat_lh; break;
   3437 
   3438   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_hl:
   3439     ID = Intrinsic::hexagon_A2_subh_h16_sat_hl; break;
   3440 
   3441   case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_hh:
   3442     ID = Intrinsic::hexagon_A2_subh_h16_sat_hh; break;
   3443 
   3444   case Hexagon::BI__builtin_HEXAGON_A2_aslh:
   3445     ID = Intrinsic::hexagon_A2_aslh; break;
   3446 
   3447   case Hexagon::BI__builtin_HEXAGON_A2_asrh:
   3448     ID = Intrinsic::hexagon_A2_asrh; break;
   3449 
   3450   case Hexagon::BI__builtin_HEXAGON_A2_addp:
   3451     ID = Intrinsic::hexagon_A2_addp; break;
   3452 
   3453   case Hexagon::BI__builtin_HEXAGON_A2_addpsat:
   3454     ID = Intrinsic::hexagon_A2_addpsat; break;
   3455 
   3456   case Hexagon::BI__builtin_HEXAGON_A2_addsp:
   3457     ID = Intrinsic::hexagon_A2_addsp; break;
   3458 
   3459   case Hexagon::BI__builtin_HEXAGON_A2_subp:
   3460     ID = Intrinsic::hexagon_A2_subp; break;
   3461 
   3462   case Hexagon::BI__builtin_HEXAGON_A2_neg:
   3463     ID = Intrinsic::hexagon_A2_neg; break;
   3464 
   3465   case Hexagon::BI__builtin_HEXAGON_A2_negsat:
   3466     ID = Intrinsic::hexagon_A2_negsat; break;
   3467 
   3468   case Hexagon::BI__builtin_HEXAGON_A2_abs:
   3469     ID = Intrinsic::hexagon_A2_abs; break;
   3470 
   3471   case Hexagon::BI__builtin_HEXAGON_A2_abssat:
   3472     ID = Intrinsic::hexagon_A2_abssat; break;
   3473 
   3474   case Hexagon::BI__builtin_HEXAGON_A2_vconj:
   3475     ID = Intrinsic::hexagon_A2_vconj; break;
   3476 
   3477   case Hexagon::BI__builtin_HEXAGON_A2_negp:
   3478     ID = Intrinsic::hexagon_A2_negp; break;
   3479 
   3480   case Hexagon::BI__builtin_HEXAGON_A2_absp:
   3481     ID = Intrinsic::hexagon_A2_absp; break;
   3482 
   3483   case Hexagon::BI__builtin_HEXAGON_A2_max:
   3484     ID = Intrinsic::hexagon_A2_max; break;
   3485 
   3486   case Hexagon::BI__builtin_HEXAGON_A2_maxu:
   3487     ID = Intrinsic::hexagon_A2_maxu; break;
   3488 
   3489   case Hexagon::BI__builtin_HEXAGON_A2_min:
   3490     ID = Intrinsic::hexagon_A2_min; break;
   3491 
   3492   case Hexagon::BI__builtin_HEXAGON_A2_minu:
   3493     ID = Intrinsic::hexagon_A2_minu; break;
   3494 
   3495   case Hexagon::BI__builtin_HEXAGON_A2_maxp:
   3496     ID = Intrinsic::hexagon_A2_maxp; break;
   3497 
   3498   case Hexagon::BI__builtin_HEXAGON_A2_maxup:
   3499     ID = Intrinsic::hexagon_A2_maxup; break;
   3500 
   3501   case Hexagon::BI__builtin_HEXAGON_A2_minp:
   3502     ID = Intrinsic::hexagon_A2_minp; break;
   3503 
   3504   case Hexagon::BI__builtin_HEXAGON_A2_minup:
   3505     ID = Intrinsic::hexagon_A2_minup; break;
   3506 
   3507   case Hexagon::BI__builtin_HEXAGON_A2_tfr:
   3508     ID = Intrinsic::hexagon_A2_tfr; break;
   3509 
   3510   case Hexagon::BI__builtin_HEXAGON_A2_tfrsi:
   3511     ID = Intrinsic::hexagon_A2_tfrsi; break;
   3512 
   3513   case Hexagon::BI__builtin_HEXAGON_A2_tfrp:
   3514     ID = Intrinsic::hexagon_A2_tfrp; break;
   3515 
   3516   case Hexagon::BI__builtin_HEXAGON_A2_tfrpi:
   3517     ID = Intrinsic::hexagon_A2_tfrpi; break;
   3518 
   3519   case Hexagon::BI__builtin_HEXAGON_A2_zxtb:
   3520     ID = Intrinsic::hexagon_A2_zxtb; break;
   3521 
   3522   case Hexagon::BI__builtin_HEXAGON_A2_sxtb:
   3523     ID = Intrinsic::hexagon_A2_sxtb; break;
   3524 
   3525   case Hexagon::BI__builtin_HEXAGON_A2_zxth:
   3526     ID = Intrinsic::hexagon_A2_zxth; break;
   3527 
   3528   case Hexagon::BI__builtin_HEXAGON_A2_sxth:
   3529     ID = Intrinsic::hexagon_A2_sxth; break;
   3530 
   3531   case Hexagon::BI__builtin_HEXAGON_A2_combinew:
   3532     ID = Intrinsic::hexagon_A2_combinew; break;
   3533 
   3534   case Hexagon::BI__builtin_HEXAGON_A2_combineii:
   3535     ID = Intrinsic::hexagon_A2_combineii; break;
   3536 
   3537   case Hexagon::BI__builtin_HEXAGON_A2_combine_hh:
   3538     ID = Intrinsic::hexagon_A2_combine_hh; break;
   3539 
   3540   case Hexagon::BI__builtin_HEXAGON_A2_combine_hl:
   3541     ID = Intrinsic::hexagon_A2_combine_hl; break;
   3542 
   3543   case Hexagon::BI__builtin_HEXAGON_A2_combine_lh:
   3544     ID = Intrinsic::hexagon_A2_combine_lh; break;
   3545 
   3546   case Hexagon::BI__builtin_HEXAGON_A2_combine_ll:
   3547     ID = Intrinsic::hexagon_A2_combine_ll; break;
   3548 
   3549   case Hexagon::BI__builtin_HEXAGON_A2_tfril:
   3550     ID = Intrinsic::hexagon_A2_tfril; break;
   3551 
   3552   case Hexagon::BI__builtin_HEXAGON_A2_tfrih:
   3553     ID = Intrinsic::hexagon_A2_tfrih; break;
   3554 
   3555   case Hexagon::BI__builtin_HEXAGON_A2_and:
   3556     ID = Intrinsic::hexagon_A2_and; break;
   3557 
   3558   case Hexagon::BI__builtin_HEXAGON_A2_or:
   3559     ID = Intrinsic::hexagon_A2_or; break;
   3560 
   3561   case Hexagon::BI__builtin_HEXAGON_A2_xor:
   3562     ID = Intrinsic::hexagon_A2_xor; break;
   3563 
   3564   case Hexagon::BI__builtin_HEXAGON_A2_not:
   3565     ID = Intrinsic::hexagon_A2_not; break;
   3566 
   3567   case Hexagon::BI__builtin_HEXAGON_M2_xor_xacc:
   3568     ID = Intrinsic::hexagon_M2_xor_xacc; break;
   3569 
   3570   case Hexagon::BI__builtin_HEXAGON_A2_subri:
   3571     ID = Intrinsic::hexagon_A2_subri; break;
   3572 
   3573   case Hexagon::BI__builtin_HEXAGON_A2_andir:
   3574     ID = Intrinsic::hexagon_A2_andir; break;
   3575 
   3576   case Hexagon::BI__builtin_HEXAGON_A2_orir:
   3577     ID = Intrinsic::hexagon_A2_orir; break;
   3578 
   3579   case Hexagon::BI__builtin_HEXAGON_A2_andp:
   3580     ID = Intrinsic::hexagon_A2_andp; break;
   3581 
   3582   case Hexagon::BI__builtin_HEXAGON_A2_orp:
   3583     ID = Intrinsic::hexagon_A2_orp; break;
   3584 
   3585   case Hexagon::BI__builtin_HEXAGON_A2_xorp:
   3586     ID = Intrinsic::hexagon_A2_xorp; break;
   3587 
   3588   case Hexagon::BI__builtin_HEXAGON_A2_notp:
   3589     ID = Intrinsic::hexagon_A2_notp; break;
   3590 
   3591   case Hexagon::BI__builtin_HEXAGON_A2_sxtw:
   3592     ID = Intrinsic::hexagon_A2_sxtw; break;
   3593 
   3594   case Hexagon::BI__builtin_HEXAGON_A2_sat:
   3595     ID = Intrinsic::hexagon_A2_sat; break;
   3596 
   3597   case Hexagon::BI__builtin_HEXAGON_A2_sath:
   3598     ID = Intrinsic::hexagon_A2_sath; break;
   3599 
   3600   case Hexagon::BI__builtin_HEXAGON_A2_satuh:
   3601     ID = Intrinsic::hexagon_A2_satuh; break;
   3602 
   3603   case Hexagon::BI__builtin_HEXAGON_A2_satub:
   3604     ID = Intrinsic::hexagon_A2_satub; break;
   3605 
   3606   case Hexagon::BI__builtin_HEXAGON_A2_satb:
   3607     ID = Intrinsic::hexagon_A2_satb; break;
   3608 
   3609   case Hexagon::BI__builtin_HEXAGON_A2_vaddub:
   3610     ID = Intrinsic::hexagon_A2_vaddub; break;
   3611 
   3612   case Hexagon::BI__builtin_HEXAGON_A2_vaddubs:
   3613     ID = Intrinsic::hexagon_A2_vaddubs; break;
   3614 
   3615   case Hexagon::BI__builtin_HEXAGON_A2_vaddh:
   3616     ID = Intrinsic::hexagon_A2_vaddh; break;
   3617 
   3618   case Hexagon::BI__builtin_HEXAGON_A2_vaddhs:
   3619     ID = Intrinsic::hexagon_A2_vaddhs; break;
   3620 
   3621   case Hexagon::BI__builtin_HEXAGON_A2_vadduhs:
   3622     ID = Intrinsic::hexagon_A2_vadduhs; break;
   3623 
   3624   case Hexagon::BI__builtin_HEXAGON_A2_vaddw:
   3625     ID = Intrinsic::hexagon_A2_vaddw; break;
   3626 
   3627   case Hexagon::BI__builtin_HEXAGON_A2_vaddws:
   3628     ID = Intrinsic::hexagon_A2_vaddws; break;
   3629 
   3630   case Hexagon::BI__builtin_HEXAGON_A2_svavgh:
   3631     ID = Intrinsic::hexagon_A2_svavgh; break;
   3632 
   3633   case Hexagon::BI__builtin_HEXAGON_A2_svavghs:
   3634     ID = Intrinsic::hexagon_A2_svavghs; break;
   3635 
   3636   case Hexagon::BI__builtin_HEXAGON_A2_svnavgh:
   3637     ID = Intrinsic::hexagon_A2_svnavgh; break;
   3638 
   3639   case Hexagon::BI__builtin_HEXAGON_A2_svaddh:
   3640     ID = Intrinsic::hexagon_A2_svaddh; break;
   3641 
   3642   case Hexagon::BI__builtin_HEXAGON_A2_svaddhs:
   3643     ID = Intrinsic::hexagon_A2_svaddhs; break;
   3644 
   3645   case Hexagon::BI__builtin_HEXAGON_A2_svadduhs:
   3646     ID = Intrinsic::hexagon_A2_svadduhs; break;
   3647 
   3648   case Hexagon::BI__builtin_HEXAGON_A2_svsubh:
   3649     ID = Intrinsic::hexagon_A2_svsubh; break;
   3650 
   3651   case Hexagon::BI__builtin_HEXAGON_A2_svsubhs:
   3652     ID = Intrinsic::hexagon_A2_svsubhs; break;
   3653 
   3654   case Hexagon::BI__builtin_HEXAGON_A2_svsubuhs:
   3655     ID = Intrinsic::hexagon_A2_svsubuhs; break;
   3656 
   3657   case Hexagon::BI__builtin_HEXAGON_A2_vraddub:
   3658     ID = Intrinsic::hexagon_A2_vraddub; break;
   3659 
   3660   case Hexagon::BI__builtin_HEXAGON_A2_vraddub_acc:
   3661     ID = Intrinsic::hexagon_A2_vraddub_acc; break;
   3662 
   3663   case Hexagon::BI__builtin_HEXAGON_M2_vradduh:
   3664     ID = Intrinsic::hexagon_M2_vradduh; break;
   3665 
   3666   case Hexagon::BI__builtin_HEXAGON_A2_vsubub:
   3667     ID = Intrinsic::hexagon_A2_vsubub; break;
   3668 
   3669   case Hexagon::BI__builtin_HEXAGON_A2_vsububs:
   3670     ID = Intrinsic::hexagon_A2_vsububs; break;
   3671 
   3672   case Hexagon::BI__builtin_HEXAGON_A2_vsubh:
   3673     ID = Intrinsic::hexagon_A2_vsubh; break;
   3674 
   3675   case Hexagon::BI__builtin_HEXAGON_A2_vsubhs:
   3676     ID = Intrinsic::hexagon_A2_vsubhs; break;
   3677 
   3678   case Hexagon::BI__builtin_HEXAGON_A2_vsubuhs:
   3679     ID = Intrinsic::hexagon_A2_vsubuhs; break;
   3680 
   3681   case Hexagon::BI__builtin_HEXAGON_A2_vsubw:
   3682     ID = Intrinsic::hexagon_A2_vsubw; break;
   3683 
   3684   case Hexagon::BI__builtin_HEXAGON_A2_vsubws:
   3685     ID = Intrinsic::hexagon_A2_vsubws; break;
   3686 
   3687   case Hexagon::BI__builtin_HEXAGON_A2_vabsh:
   3688     ID = Intrinsic::hexagon_A2_vabsh; break;
   3689 
   3690   case Hexagon::BI__builtin_HEXAGON_A2_vabshsat:
   3691     ID = Intrinsic::hexagon_A2_vabshsat; break;
   3692 
   3693   case Hexagon::BI__builtin_HEXAGON_A2_vabsw:
   3694     ID = Intrinsic::hexagon_A2_vabsw; break;
   3695 
   3696   case Hexagon::BI__builtin_HEXAGON_A2_vabswsat:
   3697     ID = Intrinsic::hexagon_A2_vabswsat; break;
   3698 
   3699   case Hexagon::BI__builtin_HEXAGON_M2_vabsdiffw:
   3700     ID = Intrinsic::hexagon_M2_vabsdiffw; break;
   3701 
   3702   case Hexagon::BI__builtin_HEXAGON_M2_vabsdiffh:
   3703     ID = Intrinsic::hexagon_M2_vabsdiffh; break;
   3704 
   3705   case Hexagon::BI__builtin_HEXAGON_A2_vrsadub:
   3706     ID = Intrinsic::hexagon_A2_vrsadub; break;
   3707 
   3708   case Hexagon::BI__builtin_HEXAGON_A2_vrsadub_acc:
   3709     ID = Intrinsic::hexagon_A2_vrsadub_acc; break;
   3710 
   3711   case Hexagon::BI__builtin_HEXAGON_A2_vavgub:
   3712     ID = Intrinsic::hexagon_A2_vavgub; break;
   3713 
   3714   case Hexagon::BI__builtin_HEXAGON_A2_vavguh:
   3715     ID = Intrinsic::hexagon_A2_vavguh; break;
   3716 
   3717   case Hexagon::BI__builtin_HEXAGON_A2_vavgh:
   3718     ID = Intrinsic::hexagon_A2_vavgh; break;
   3719 
   3720   case Hexagon::BI__builtin_HEXAGON_A2_vnavgh:
   3721     ID = Intrinsic::hexagon_A2_vnavgh; break;
   3722 
   3723   case Hexagon::BI__builtin_HEXAGON_A2_vavgw:
   3724     ID = Intrinsic::hexagon_A2_vavgw; break;
   3725 
   3726   case Hexagon::BI__builtin_HEXAGON_A2_vnavgw:
   3727     ID = Intrinsic::hexagon_A2_vnavgw; break;
   3728 
   3729   case Hexagon::BI__builtin_HEXAGON_A2_vavgwr:
   3730     ID = Intrinsic::hexagon_A2_vavgwr; break;
   3731 
   3732   case Hexagon::BI__builtin_HEXAGON_A2_vnavgwr:
   3733     ID = Intrinsic::hexagon_A2_vnavgwr; break;
   3734 
   3735   case Hexagon::BI__builtin_HEXAGON_A2_vavgwcr:
   3736     ID = Intrinsic::hexagon_A2_vavgwcr; break;
   3737 
   3738   case Hexagon::BI__builtin_HEXAGON_A2_vnavgwcr:
   3739     ID = Intrinsic::hexagon_A2_vnavgwcr; break;
   3740 
   3741   case Hexagon::BI__builtin_HEXAGON_A2_vavghcr:
   3742     ID = Intrinsic::hexagon_A2_vavghcr; break;
   3743 
   3744   case Hexagon::BI__builtin_HEXAGON_A2_vnavghcr:
   3745     ID = Intrinsic::hexagon_A2_vnavghcr; break;
   3746 
   3747   case Hexagon::BI__builtin_HEXAGON_A2_vavguw:
   3748     ID = Intrinsic::hexagon_A2_vavguw; break;
   3749 
   3750   case Hexagon::BI__builtin_HEXAGON_A2_vavguwr:
   3751     ID = Intrinsic::hexagon_A2_vavguwr; break;
   3752 
   3753   case Hexagon::BI__builtin_HEXAGON_A2_vavgubr:
   3754     ID = Intrinsic::hexagon_A2_vavgubr; break;
   3755 
   3756   case Hexagon::BI__builtin_HEXAGON_A2_vavguhr:
   3757     ID = Intrinsic::hexagon_A2_vavguhr; break;
   3758 
   3759   case Hexagon::BI__builtin_HEXAGON_A2_vavghr:
   3760     ID = Intrinsic::hexagon_A2_vavghr; break;
   3761 
   3762   case Hexagon::BI__builtin_HEXAGON_A2_vnavghr:
   3763     ID = Intrinsic::hexagon_A2_vnavghr; break;
   3764 
   3765   case Hexagon::BI__builtin_HEXAGON_A2_vminh:
   3766     ID = Intrinsic::hexagon_A2_vminh; break;
   3767 
   3768   case Hexagon::BI__builtin_HEXAGON_A2_vmaxh:
   3769     ID = Intrinsic::hexagon_A2_vmaxh; break;
   3770 
   3771   case Hexagon::BI__builtin_HEXAGON_A2_vminub:
   3772     ID = Intrinsic::hexagon_A2_vminub; break;
   3773 
   3774   case Hexagon::BI__builtin_HEXAGON_A2_vmaxub:
   3775     ID = Intrinsic::hexagon_A2_vmaxub; break;
   3776 
   3777   case Hexagon::BI__builtin_HEXAGON_A2_vminuh:
   3778     ID = Intrinsic::hexagon_A2_vminuh; break;
   3779 
   3780   case Hexagon::BI__builtin_HEXAGON_A2_vmaxuh:
   3781     ID = Intrinsic::hexagon_A2_vmaxuh; break;
   3782 
   3783   case Hexagon::BI__builtin_HEXAGON_A2_vminw:
   3784     ID = Intrinsic::hexagon_A2_vminw; break;
   3785 
   3786   case Hexagon::BI__builtin_HEXAGON_A2_vmaxw:
   3787     ID = Intrinsic::hexagon_A2_vmaxw; break;
   3788 
   3789   case Hexagon::BI__builtin_HEXAGON_A2_vminuw:
   3790     ID = Intrinsic::hexagon_A2_vminuw; break;
   3791 
   3792   case Hexagon::BI__builtin_HEXAGON_A2_vmaxuw:
   3793     ID = Intrinsic::hexagon_A2_vmaxuw; break;
   3794 
   3795   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r:
   3796     ID = Intrinsic::hexagon_S2_asr_r_r; break;
   3797 
   3798   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r:
   3799     ID = Intrinsic::hexagon_S2_asl_r_r; break;
   3800 
   3801   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r:
   3802     ID = Intrinsic::hexagon_S2_lsr_r_r; break;
   3803 
   3804   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r:
   3805     ID = Intrinsic::hexagon_S2_lsl_r_r; break;
   3806 
   3807   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p:
   3808     ID = Intrinsic::hexagon_S2_asr_r_p; break;
   3809 
   3810   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p:
   3811     ID = Intrinsic::hexagon_S2_asl_r_p; break;
   3812 
   3813   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p:
   3814     ID = Intrinsic::hexagon_S2_lsr_r_p; break;
   3815 
   3816   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p:
   3817     ID = Intrinsic::hexagon_S2_lsl_r_p; break;
   3818 
   3819   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_acc:
   3820     ID = Intrinsic::hexagon_S2_asr_r_r_acc; break;
   3821 
   3822   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_acc:
   3823     ID = Intrinsic::hexagon_S2_asl_r_r_acc; break;
   3824 
   3825   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_acc:
   3826     ID = Intrinsic::hexagon_S2_lsr_r_r_acc; break;
   3827 
   3828   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_acc:
   3829     ID = Intrinsic::hexagon_S2_lsl_r_r_acc; break;
   3830 
   3831   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_acc:
   3832     ID = Intrinsic::hexagon_S2_asr_r_p_acc; break;
   3833 
   3834   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_acc:
   3835     ID = Intrinsic::hexagon_S2_asl_r_p_acc; break;
   3836 
   3837   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_acc:
   3838     ID = Intrinsic::hexagon_S2_lsr_r_p_acc; break;
   3839 
   3840   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_acc:
   3841     ID = Intrinsic::hexagon_S2_lsl_r_p_acc; break;
   3842 
   3843   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_nac:
   3844     ID = Intrinsic::hexagon_S2_asr_r_r_nac; break;
   3845 
   3846   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_nac:
   3847     ID = Intrinsic::hexagon_S2_asl_r_r_nac; break;
   3848 
   3849   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_nac:
   3850     ID = Intrinsic::hexagon_S2_lsr_r_r_nac; break;
   3851 
   3852   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_nac:
   3853     ID = Intrinsic::hexagon_S2_lsl_r_r_nac; break;
   3854 
   3855   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_nac:
   3856     ID = Intrinsic::hexagon_S2_asr_r_p_nac; break;
   3857 
   3858   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_nac:
   3859     ID = Intrinsic::hexagon_S2_asl_r_p_nac; break;
   3860 
   3861   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_nac:
   3862     ID = Intrinsic::hexagon_S2_lsr_r_p_nac; break;
   3863 
   3864   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_nac:
   3865     ID = Intrinsic::hexagon_S2_lsl_r_p_nac; break;
   3866 
   3867   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_and:
   3868     ID = Intrinsic::hexagon_S2_asr_r_r_and; break;
   3869 
   3870   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_and:
   3871     ID = Intrinsic::hexagon_S2_asl_r_r_and; break;
   3872 
   3873   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_and:
   3874     ID = Intrinsic::hexagon_S2_lsr_r_r_and; break;
   3875 
   3876   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_and:
   3877     ID = Intrinsic::hexagon_S2_lsl_r_r_and; break;
   3878 
   3879   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_or:
   3880     ID = Intrinsic::hexagon_S2_asr_r_r_or; break;
   3881 
   3882   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_or:
   3883     ID = Intrinsic::hexagon_S2_asl_r_r_or; break;
   3884 
   3885   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_or:
   3886     ID = Intrinsic::hexagon_S2_lsr_r_r_or; break;
   3887 
   3888   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_or:
   3889     ID = Intrinsic::hexagon_S2_lsl_r_r_or; break;
   3890 
   3891   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_and:
   3892     ID = Intrinsic::hexagon_S2_asr_r_p_and; break;
   3893 
   3894   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_and:
   3895     ID = Intrinsic::hexagon_S2_asl_r_p_and; break;
   3896 
   3897   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_and:
   3898     ID = Intrinsic::hexagon_S2_lsr_r_p_and; break;
   3899 
   3900   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_and:
   3901     ID = Intrinsic::hexagon_S2_lsl_r_p_and; break;
   3902 
   3903   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_or:
   3904     ID = Intrinsic::hexagon_S2_asr_r_p_or; break;
   3905 
   3906   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_or:
   3907     ID = Intrinsic::hexagon_S2_asl_r_p_or; break;
   3908 
   3909   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_or:
   3910     ID = Intrinsic::hexagon_S2_lsr_r_p_or; break;
   3911 
   3912   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_or:
   3913     ID = Intrinsic::hexagon_S2_lsl_r_p_or; break;
   3914 
   3915   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_sat:
   3916     ID = Intrinsic::hexagon_S2_asr_r_r_sat; break;
   3917 
   3918   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_sat:
   3919     ID = Intrinsic::hexagon_S2_asl_r_r_sat; break;
   3920 
   3921   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r:
   3922     ID = Intrinsic::hexagon_S2_asr_i_r; break;
   3923 
   3924   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r:
   3925     ID = Intrinsic::hexagon_S2_lsr_i_r; break;
   3926 
   3927   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r:
   3928     ID = Intrinsic::hexagon_S2_asl_i_r; break;
   3929 
   3930   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p:
   3931     ID = Intrinsic::hexagon_S2_asr_i_p; break;
   3932 
   3933   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p:
   3934     ID = Intrinsic::hexagon_S2_lsr_i_p; break;
   3935 
   3936   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p:
   3937     ID = Intrinsic::hexagon_S2_asl_i_p; break;
   3938 
   3939   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_acc:
   3940     ID = Intrinsic::hexagon_S2_asr_i_r_acc; break;
   3941 
   3942   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_acc:
   3943     ID = Intrinsic::hexagon_S2_lsr_i_r_acc; break;
   3944 
   3945   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_acc:
   3946     ID = Intrinsic::hexagon_S2_asl_i_r_acc; break;
   3947 
   3948   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_acc:
   3949     ID = Intrinsic::hexagon_S2_asr_i_p_acc; break;
   3950 
   3951   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_acc:
   3952     ID = Intrinsic::hexagon_S2_lsr_i_p_acc; break;
   3953 
   3954   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_acc:
   3955     ID = Intrinsic::hexagon_S2_asl_i_p_acc; break;
   3956 
   3957   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_nac:
   3958     ID = Intrinsic::hexagon_S2_asr_i_r_nac; break;
   3959 
   3960   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_nac:
   3961     ID = Intrinsic::hexagon_S2_lsr_i_r_nac; break;
   3962 
   3963   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_nac:
   3964     ID = Intrinsic::hexagon_S2_asl_i_r_nac; break;
   3965 
   3966   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_nac:
   3967     ID = Intrinsic::hexagon_S2_asr_i_p_nac; break;
   3968 
   3969   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_nac:
   3970     ID = Intrinsic::hexagon_S2_lsr_i_p_nac; break;
   3971 
   3972   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_nac:
   3973     ID = Intrinsic::hexagon_S2_asl_i_p_nac; break;
   3974 
   3975   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_xacc:
   3976     ID = Intrinsic::hexagon_S2_lsr_i_r_xacc; break;
   3977 
   3978   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_xacc:
   3979     ID = Intrinsic::hexagon_S2_asl_i_r_xacc; break;
   3980 
   3981   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_xacc:
   3982     ID = Intrinsic::hexagon_S2_lsr_i_p_xacc; break;
   3983 
   3984   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_xacc:
   3985     ID = Intrinsic::hexagon_S2_asl_i_p_xacc; break;
   3986 
   3987   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_and:
   3988     ID = Intrinsic::hexagon_S2_asr_i_r_and; break;
   3989 
   3990   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_and:
   3991     ID = Intrinsic::hexagon_S2_lsr_i_r_and; break;
   3992 
   3993   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_and:
   3994     ID = Intrinsic::hexagon_S2_asl_i_r_and; break;
   3995 
   3996   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_or:
   3997     ID = Intrinsic::hexagon_S2_asr_i_r_or; break;
   3998 
   3999   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_or:
   4000     ID = Intrinsic::hexagon_S2_lsr_i_r_or; break;
   4001 
   4002   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_or:
   4003     ID = Intrinsic::hexagon_S2_asl_i_r_or; break;
   4004 
   4005   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_and:
   4006     ID = Intrinsic::hexagon_S2_asr_i_p_and; break;
   4007 
   4008   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_and:
   4009     ID = Intrinsic::hexagon_S2_lsr_i_p_and; break;
   4010 
   4011   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_and:
   4012     ID = Intrinsic::hexagon_S2_asl_i_p_and; break;
   4013 
   4014   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_or:
   4015     ID = Intrinsic::hexagon_S2_asr_i_p_or; break;
   4016 
   4017   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_or:
   4018     ID = Intrinsic::hexagon_S2_lsr_i_p_or; break;
   4019 
   4020   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_or:
   4021     ID = Intrinsic::hexagon_S2_asl_i_p_or; break;
   4022 
   4023   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_sat:
   4024     ID = Intrinsic::hexagon_S2_asl_i_r_sat; break;
   4025 
   4026   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd:
   4027     ID = Intrinsic::hexagon_S2_asr_i_r_rnd; break;
   4028 
   4029   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd_goodsyntax:
   4030     ID = Intrinsic::hexagon_S2_asr_i_r_rnd_goodsyntax; break;
   4031 
   4032   case Hexagon::BI__builtin_HEXAGON_S2_addasl_rrri:
   4033     ID = Intrinsic::hexagon_S2_addasl_rrri; break;
   4034 
   4035   case Hexagon::BI__builtin_HEXAGON_S2_valignib:
   4036     ID = Intrinsic::hexagon_S2_valignib; break;
   4037 
   4038   case Hexagon::BI__builtin_HEXAGON_S2_valignrb:
   4039     ID = Intrinsic::hexagon_S2_valignrb; break;
   4040 
   4041   case Hexagon::BI__builtin_HEXAGON_S2_vspliceib:
   4042     ID = Intrinsic::hexagon_S2_vspliceib; break;
   4043 
   4044   case Hexagon::BI__builtin_HEXAGON_S2_vsplicerb:
   4045     ID = Intrinsic::hexagon_S2_vsplicerb; break;
   4046 
   4047   case Hexagon::BI__builtin_HEXAGON_S2_vsplatrh:
   4048     ID = Intrinsic::hexagon_S2_vsplatrh; break;
   4049 
   4050   case Hexagon::BI__builtin_HEXAGON_S2_vsplatrb:
   4051     ID = Intrinsic::hexagon_S2_vsplatrb; break;
   4052 
   4053   case Hexagon::BI__builtin_HEXAGON_S2_insert:
   4054     ID = Intrinsic::hexagon_S2_insert; break;
   4055 
   4056   case Hexagon::BI__builtin_HEXAGON_S2_tableidxb_goodsyntax:
   4057     ID = Intrinsic::hexagon_S2_tableidxb_goodsyntax; break;
   4058 
   4059   case Hexagon::BI__builtin_HEXAGON_S2_tableidxh_goodsyntax:
   4060     ID = Intrinsic::hexagon_S2_tableidxh_goodsyntax; break;
   4061 
   4062   case Hexagon::BI__builtin_HEXAGON_S2_tableidxw_goodsyntax:
   4063     ID = Intrinsic::hexagon_S2_tableidxw_goodsyntax; break;
   4064 
   4065   case Hexagon::BI__builtin_HEXAGON_S2_tableidxd_goodsyntax:
   4066     ID = Intrinsic::hexagon_S2_tableidxd_goodsyntax; break;
   4067 
   4068   case Hexagon::BI__builtin_HEXAGON_S2_extractu:
   4069     ID = Intrinsic::hexagon_S2_extractu; break;
   4070 
   4071   case Hexagon::BI__builtin_HEXAGON_S2_insertp:
   4072     ID = Intrinsic::hexagon_S2_insertp; break;
   4073 
   4074   case Hexagon::BI__builtin_HEXAGON_S2_extractup:
   4075     ID = Intrinsic::hexagon_S2_extractup; break;
   4076 
   4077   case Hexagon::BI__builtin_HEXAGON_S2_insert_rp:
   4078     ID = Intrinsic::hexagon_S2_insert_rp; break;
   4079 
   4080   case Hexagon::BI__builtin_HEXAGON_S2_extractu_rp:
   4081     ID = Intrinsic::hexagon_S2_extractu_rp; break;
   4082 
   4083   case Hexagon::BI__builtin_HEXAGON_S2_insertp_rp:
   4084     ID = Intrinsic::hexagon_S2_insertp_rp; break;
   4085 
   4086   case Hexagon::BI__builtin_HEXAGON_S2_extractup_rp:
   4087     ID = Intrinsic::hexagon_S2_extractup_rp; break;
   4088 
   4089   case Hexagon::BI__builtin_HEXAGON_S2_tstbit_i:
   4090     ID = Intrinsic::hexagon_S2_tstbit_i; break;
   4091 
   4092   case Hexagon::BI__builtin_HEXAGON_S2_setbit_i:
   4093     ID = Intrinsic::hexagon_S2_setbit_i; break;
   4094 
   4095   case Hexagon::BI__builtin_HEXAGON_S2_togglebit_i:
   4096     ID = Intrinsic::hexagon_S2_togglebit_i; break;
   4097 
   4098   case Hexagon::BI__builtin_HEXAGON_S2_clrbit_i:
   4099     ID = Intrinsic::hexagon_S2_clrbit_i; break;
   4100 
   4101   case Hexagon::BI__builtin_HEXAGON_S2_tstbit_r:
   4102     ID = Intrinsic::hexagon_S2_tstbit_r; break;
   4103 
   4104   case Hexagon::BI__builtin_HEXAGON_S2_setbit_r:
   4105     ID = Intrinsic::hexagon_S2_setbit_r; break;
   4106 
   4107   case Hexagon::BI__builtin_HEXAGON_S2_togglebit_r:
   4108     ID = Intrinsic::hexagon_S2_togglebit_r; break;
   4109 
   4110   case Hexagon::BI__builtin_HEXAGON_S2_clrbit_r:
   4111     ID = Intrinsic::hexagon_S2_clrbit_r; break;
   4112 
   4113   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_vh:
   4114     ID = Intrinsic::hexagon_S2_asr_i_vh; break;
   4115 
   4116   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vh:
   4117     ID = Intrinsic::hexagon_S2_lsr_i_vh; break;
   4118 
   4119   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_vh:
   4120     ID = Intrinsic::hexagon_S2_asl_i_vh; break;
   4121 
   4122   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_vh:
   4123     ID = Intrinsic::hexagon_S2_asr_r_vh; break;
   4124 
   4125   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_vh:
   4126     ID = Intrinsic::hexagon_S2_asl_r_vh; break;
   4127 
   4128   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_vh:
   4129     ID = Intrinsic::hexagon_S2_lsr_r_vh; break;
   4130 
   4131   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_vh:
   4132     ID = Intrinsic::hexagon_S2_lsl_r_vh; break;
   4133 
   4134   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_vw:
   4135     ID = Intrinsic::hexagon_S2_asr_i_vw; break;
   4136 
   4137   case Hexagon::BI__builtin_HEXAGON_S2_asr_i_svw_trun:
   4138     ID = Intrinsic::hexagon_S2_asr_i_svw_trun; break;
   4139 
   4140   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_svw_trun:
   4141     ID = Intrinsic::hexagon_S2_asr_r_svw_trun; break;
   4142 
   4143   case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vw:
   4144     ID = Intrinsic::hexagon_S2_lsr_i_vw; break;
   4145 
   4146   case Hexagon::BI__builtin_HEXAGON_S2_asl_i_vw:
   4147     ID = Intrinsic::hexagon_S2_asl_i_vw; break;
   4148 
   4149   case Hexagon::BI__builtin_HEXAGON_S2_asr_r_vw:
   4150     ID = Intrinsic::hexagon_S2_asr_r_vw; break;
   4151 
   4152   case Hexagon::BI__builtin_HEXAGON_S2_asl_r_vw:
   4153     ID = Intrinsic::hexagon_S2_asl_r_vw; break;
   4154 
   4155   case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_vw:
   4156     ID = Intrinsic::hexagon_S2_lsr_r_vw; break;
   4157 
   4158   case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_vw:
   4159     ID = Intrinsic::hexagon_S2_lsl_r_vw; break;
   4160 
   4161   case Hexagon::BI__builtin_HEXAGON_S2_vrndpackwh:
   4162     ID = Intrinsic::hexagon_S2_vrndpackwh; break;
   4163 
   4164   case Hexagon::BI__builtin_HEXAGON_S2_vrndpackwhs:
   4165     ID = Intrinsic::hexagon_S2_vrndpackwhs; break;
   4166 
   4167   case Hexagon::BI__builtin_HEXAGON_S2_vsxtbh:
   4168     ID = Intrinsic::hexagon_S2_vsxtbh; break;
   4169 
   4170   case Hexagon::BI__builtin_HEXAGON_S2_vzxtbh:
   4171     ID = Intrinsic::hexagon_S2_vzxtbh; break;
   4172 
   4173   case Hexagon::BI__builtin_HEXAGON_S2_vsathub:
   4174     ID = Intrinsic::hexagon_S2_vsathub; break;
   4175 
   4176   case Hexagon::BI__builtin_HEXAGON_S2_svsathub:
   4177     ID = Intrinsic::hexagon_S2_svsathub; break;
   4178 
   4179   case Hexagon::BI__builtin_HEXAGON_S2_svsathb:
   4180     ID = Intrinsic::hexagon_S2_svsathb; break;
   4181 
   4182   case Hexagon::BI__builtin_HEXAGON_S2_vsathb:
   4183     ID = Intrinsic::hexagon_S2_vsathb; break;
   4184 
   4185   case Hexagon::BI__builtin_HEXAGON_S2_vtrunohb:
   4186     ID = Intrinsic::hexagon_S2_vtrunohb; break;
   4187 
   4188   case Hexagon::BI__builtin_HEXAGON_S2_vtrunewh:
   4189     ID = Intrinsic::hexagon_S2_vtrunewh; break;
   4190 
   4191   case Hexagon::BI__builtin_HEXAGON_S2_vtrunowh:
   4192     ID = Intrinsic::hexagon_S2_vtrunowh; break;
   4193 
   4194   case Hexagon::BI__builtin_HEXAGON_S2_vtrunehb:
   4195     ID = Intrinsic::hexagon_S2_vtrunehb; break;
   4196 
   4197   case Hexagon::BI__builtin_HEXAGON_S2_vsxthw:
   4198     ID = Intrinsic::hexagon_S2_vsxthw; break;
   4199 
   4200   case Hexagon::BI__builtin_HEXAGON_S2_vzxthw:
   4201     ID = Intrinsic::hexagon_S2_vzxthw; break;
   4202 
   4203   case Hexagon::BI__builtin_HEXAGON_S2_vsatwh:
   4204     ID = Intrinsic::hexagon_S2_vsatwh; break;
   4205 
   4206   case Hexagon::BI__builtin_HEXAGON_S2_vsatwuh:
   4207     ID = Intrinsic::hexagon_S2_vsatwuh; break;
   4208 
   4209   case Hexagon::BI__builtin_HEXAGON_S2_packhl:
   4210     ID = Intrinsic::hexagon_S2_packhl; break;
   4211 
   4212   case Hexagon::BI__builtin_HEXAGON_A2_swiz:
   4213     ID = Intrinsic::hexagon_A2_swiz; break;
   4214 
   4215   case Hexagon::BI__builtin_HEXAGON_S2_vsathub_nopack:
   4216     ID = Intrinsic::hexagon_S2_vsathub_nopack; break;
   4217 
   4218   case Hexagon::BI__builtin_HEXAGON_S2_vsathb_nopack:
   4219     ID = Intrinsic::hexagon_S2_vsathb_nopack; break;
   4220 
   4221   case Hexagon::BI__builtin_HEXAGON_S2_vsatwh_nopack:
   4222     ID = Intrinsic::hexagon_S2_vsatwh_nopack; break;
   4223 
   4224   case Hexagon::BI__builtin_HEXAGON_S2_vsatwuh_nopack:
   4225     ID = Intrinsic::hexagon_S2_vsatwuh_nopack; break;
   4226 
   4227   case Hexagon::BI__builtin_HEXAGON_S2_shuffob:
   4228     ID = Intrinsic::hexagon_S2_shuffob; break;
   4229 
   4230   case Hexagon::BI__builtin_HEXAGON_S2_shuffeb:
   4231     ID = Intrinsic::hexagon_S2_shuffeb; break;
   4232 
   4233   case Hexagon::BI__builtin_HEXAGON_S2_shuffoh:
   4234     ID = Intrinsic::hexagon_S2_shuffoh; break;
   4235 
   4236   case Hexagon::BI__builtin_HEXAGON_S2_shuffeh:
   4237     ID = Intrinsic::hexagon_S2_shuffeh; break;
   4238 
   4239   case Hexagon::BI__builtin_HEXAGON_S2_parityp:
   4240     ID = Intrinsic::hexagon_S2_parityp; break;
   4241 
   4242   case Hexagon::BI__builtin_HEXAGON_S2_lfsp:
   4243     ID = Intrinsic::hexagon_S2_lfsp; break;
   4244 
   4245   case Hexagon::BI__builtin_HEXAGON_S2_clbnorm:
   4246     ID = Intrinsic::hexagon_S2_clbnorm; break;
   4247 
   4248   case Hexagon::BI__builtin_HEXAGON_S2_clb:
   4249     ID = Intrinsic::hexagon_S2_clb; break;
   4250 
   4251   case Hexagon::BI__builtin_HEXAGON_S2_cl0:
   4252     ID = Intrinsic::hexagon_S2_cl0; break;
   4253 
   4254   case Hexagon::BI__builtin_HEXAGON_S2_cl1:
   4255     ID = Intrinsic::hexagon_S2_cl1; break;
   4256 
   4257   case Hexagon::BI__builtin_HEXAGON_S2_clbp:
   4258     ID = Intrinsic::hexagon_S2_clbp; break;
   4259 
   4260   case Hexagon::BI__builtin_HEXAGON_S2_cl0p:
   4261     ID = Intrinsic::hexagon_S2_cl0p; break;
   4262 
   4263   case Hexagon::BI__builtin_HEXAGON_S2_cl1p:
   4264     ID = Intrinsic::hexagon_S2_cl1p; break;
   4265 
   4266   case Hexagon::BI__builtin_HEXAGON_S2_brev:
   4267     ID = Intrinsic::hexagon_S2_brev; break;
   4268 
   4269   case Hexagon::BI__builtin_HEXAGON_S2_ct0:
   4270     ID = Intrinsic::hexagon_S2_ct0; break;
   4271 
   4272   case Hexagon::BI__builtin_HEXAGON_S2_ct1:
   4273     ID = Intrinsic::hexagon_S2_ct1; break;
   4274 
   4275   case Hexagon::BI__builtin_HEXAGON_S2_interleave:
   4276     ID = Intrinsic::hexagon_S2_interleave; break;
   4277 
   4278   case Hexagon::BI__builtin_HEXAGON_S2_deinterleave:
   4279     ID = Intrinsic::hexagon_S2_deinterleave; break;
   4280 
   4281   case Hexagon::BI__builtin_SI_to_SXTHI_asrh:
   4282     ID = Intrinsic::hexagon_SI_to_SXTHI_asrh; break;
   4283 
   4284   case Hexagon::BI__builtin_HEXAGON_A4_orn:
   4285     ID = Intrinsic::hexagon_A4_orn; break;
   4286 
   4287   case Hexagon::BI__builtin_HEXAGON_A4_andn:
   4288     ID = Intrinsic::hexagon_A4_andn; break;
   4289 
   4290   case Hexagon::BI__builtin_HEXAGON_A4_ornp:
   4291     ID = Intrinsic::hexagon_A4_ornp; break;
   4292 
   4293   case Hexagon::BI__builtin_HEXAGON_A4_andnp:
   4294     ID = Intrinsic::hexagon_A4_andnp; break;
   4295 
   4296   case Hexagon::BI__builtin_HEXAGON_A4_combineir:
   4297     ID = Intrinsic::hexagon_A4_combineir; break;
   4298 
   4299   case Hexagon::BI__builtin_HEXAGON_A4_combineri:
   4300     ID = Intrinsic::hexagon_A4_combineri; break;
   4301 
   4302   case Hexagon::BI__builtin_HEXAGON_C4_cmpneqi:
   4303     ID = Intrinsic::hexagon_C4_cmpneqi; break;
   4304 
   4305   case Hexagon::BI__builtin_HEXAGON_C4_cmpneq:
   4306     ID = Intrinsic::hexagon_C4_cmpneq; break;
   4307 
   4308   case Hexagon::BI__builtin_HEXAGON_C4_cmpltei:
   4309     ID = Intrinsic::hexagon_C4_cmpltei; break;
   4310 
   4311   case Hexagon::BI__builtin_HEXAGON_C4_cmplte:
   4312     ID = Intrinsic::hexagon_C4_cmplte; break;
   4313 
   4314   case Hexagon::BI__builtin_HEXAGON_C4_cmplteui:
   4315     ID = Intrinsic::hexagon_C4_cmplteui; break;
   4316 
   4317   case Hexagon::BI__builtin_HEXAGON_C4_cmplteu:
   4318     ID = Intrinsic::hexagon_C4_cmplteu; break;
   4319 
   4320   case Hexagon::BI__builtin_HEXAGON_A4_rcmpneq:
   4321     ID = Intrinsic::hexagon_A4_rcmpneq; break;
   4322 
   4323   case Hexagon::BI__builtin_HEXAGON_A4_rcmpneqi:
   4324     ID = Intrinsic::hexagon_A4_rcmpneqi; break;
   4325 
   4326   case Hexagon::BI__builtin_HEXAGON_A4_rcmpeq:
   4327     ID = Intrinsic::hexagon_A4_rcmpeq; break;
   4328 
   4329   case Hexagon::BI__builtin_HEXAGON_A4_rcmpeqi:
   4330     ID = Intrinsic::hexagon_A4_rcmpeqi; break;
   4331 
   4332   case Hexagon::BI__builtin_HEXAGON_C4_fastcorner9:
   4333     ID = Intrinsic::hexagon_C4_fastcorner9; break;
   4334 
   4335   case Hexagon::BI__builtin_HEXAGON_C4_fastcorner9_not:
   4336     ID = Intrinsic::hexagon_C4_fastcorner9_not; break;
   4337 
   4338   case Hexagon::BI__builtin_HEXAGON_C4_and_andn:
   4339     ID = Intrinsic::hexagon_C4_and_andn; break;
   4340 
   4341   case Hexagon::BI__builtin_HEXAGON_C4_and_and:
   4342     ID = Intrinsic::hexagon_C4_and_and; break;
   4343 
   4344   case Hexagon::BI__builtin_HEXAGON_C4_and_orn:
   4345     ID = Intrinsic::hexagon_C4_and_orn; break;
   4346 
   4347   case Hexagon::BI__builtin_HEXAGON_C4_and_or:
   4348     ID = Intrinsic::hexagon_C4_and_or; break;
   4349 
   4350   case Hexagon::BI__builtin_HEXAGON_C4_or_andn:
   4351     ID = Intrinsic::hexagon_C4_or_andn; break;
   4352 
   4353   case Hexagon::BI__builtin_HEXAGON_C4_or_and:
   4354     ID = Intrinsic::hexagon_C4_or_and; break;
   4355 
   4356   case Hexagon::BI__builtin_HEXAGON_C4_or_orn:
   4357     ID = Intrinsic::hexagon_C4_or_orn; break;
   4358 
   4359   case Hexagon::BI__builtin_HEXAGON_C4_or_or:
   4360     ID = Intrinsic::hexagon_C4_or_or; break;
   4361 
   4362   case Hexagon::BI__builtin_HEXAGON_S4_addaddi:
   4363     ID = Intrinsic::hexagon_S4_addaddi; break;
   4364 
   4365   case Hexagon::BI__builtin_HEXAGON_S4_subaddi:
   4366     ID = Intrinsic::hexagon_S4_subaddi; break;
   4367 
   4368   case Hexagon::BI__builtin_HEXAGON_M4_xor_xacc:
   4369     ID = Intrinsic::hexagon_M4_xor_xacc; break;
   4370 
   4371   case Hexagon::BI__builtin_HEXAGON_M4_and_and:
   4372     ID = Intrinsic::hexagon_M4_and_and; break;
   4373 
   4374   case Hexagon::BI__builtin_HEXAGON_M4_and_or:
   4375     ID = Intrinsic::hexagon_M4_and_or; break;
   4376 
   4377   case Hexagon::BI__builtin_HEXAGON_M4_and_xor:
   4378     ID = Intrinsic::hexagon_M4_and_xor; break;
   4379 
   4380   case Hexagon::BI__builtin_HEXAGON_M4_and_andn:
   4381     ID = Intrinsic::hexagon_M4_and_andn; break;
   4382 
   4383   case Hexagon::BI__builtin_HEXAGON_M4_xor_and:
   4384     ID = Intrinsic::hexagon_M4_xor_and; break;
   4385 
   4386   case Hexagon::BI__builtin_HEXAGON_M4_xor_or:
   4387     ID = Intrinsic::hexagon_M4_xor_or; break;
   4388 
   4389   case Hexagon::BI__builtin_HEXAGON_M4_xor_andn:
   4390     ID = Intrinsic::hexagon_M4_xor_andn; break;
   4391 
   4392   case Hexagon::BI__builtin_HEXAGON_M4_or_and:
   4393     ID = Intrinsic::hexagon_M4_or_and; break;
   4394 
   4395   case Hexagon::BI__builtin_HEXAGON_M4_or_or:
   4396     ID = Intrinsic::hexagon_M4_or_or; break;
   4397 
   4398   case Hexagon::BI__builtin_HEXAGON_M4_or_xor:
   4399     ID = Intrinsic::hexagon_M4_or_xor; break;
   4400 
   4401   case Hexagon::BI__builtin_HEXAGON_M4_or_andn:
   4402     ID = Intrinsic::hexagon_M4_or_andn; break;
   4403 
   4404   case Hexagon::BI__builtin_HEXAGON_S4_or_andix:
   4405     ID = Intrinsic::hexagon_S4_or_andix; break;
   4406 
   4407   case Hexagon::BI__builtin_HEXAGON_S4_or_andi:
   4408     ID = Intrinsic::hexagon_S4_or_andi; break;
   4409 
   4410   case Hexagon::BI__builtin_HEXAGON_S4_or_ori:
   4411     ID = Intrinsic::hexagon_S4_or_ori; break;
   4412 
   4413   case Hexagon::BI__builtin_HEXAGON_A4_modwrapu:
   4414     ID = Intrinsic::hexagon_A4_modwrapu; break;
   4415 
   4416   case Hexagon::BI__builtin_HEXAGON_A4_cround_rr:
   4417     ID = Intrinsic::hexagon_A4_cround_rr; break;
   4418 
   4419   case Hexagon::BI__builtin_HEXAGON_A4_round_ri:
   4420     ID = Intrinsic::hexagon_A4_round_ri; break;
   4421 
   4422   case Hexagon::BI__builtin_HEXAGON_A4_round_rr:
   4423     ID = Intrinsic::hexagon_A4_round_rr; break;
   4424 
   4425   case Hexagon::BI__builtin_HEXAGON_A4_round_ri_sat:
   4426     ID = Intrinsic::hexagon_A4_round_ri_sat; break;
   4427 
   4428   case Hexagon::BI__builtin_HEXAGON_A4_round_rr_sat:
   4429     ID = Intrinsic::hexagon_A4_round_rr_sat; break;
   4430 
   4431   }
   4432 
   4433   llvm::Function *F = CGM.getIntrinsic(ID);
   4434   return Builder.CreateCall(F, Ops, "");
   4435 }
   4436 
   4437 Value *CodeGenFunction::EmitPPCBuiltinExpr(unsigned BuiltinID,
   4438                                            const CallExpr *E) {
   4439   SmallVector<Value*, 4> Ops;
   4440 
   4441   for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
   4442     Ops.push_back(EmitScalarExpr(E->getArg(i)));
   4443 
   4444   Intrinsic::ID ID = Intrinsic::not_intrinsic;
   4445 
   4446   switch (BuiltinID) {
   4447   default: return 0;
   4448 
   4449   // vec_ld, vec_lvsl, vec_lvsr
   4450   case PPC::BI__builtin_altivec_lvx:
   4451   case PPC::BI__builtin_altivec_lvxl:
   4452   case PPC::BI__builtin_altivec_lvebx:
   4453   case PPC::BI__builtin_altivec_lvehx:
   4454   case PPC::BI__builtin_altivec_lvewx:
   4455   case PPC::BI__builtin_altivec_lvsl:
   4456   case PPC::BI__builtin_altivec_lvsr:
   4457   {
   4458     Ops[1] = Builder.CreateBitCast(Ops[1], Int8PtrTy);
   4459 
   4460     Ops[0] = Builder.CreateGEP(Ops[1], Ops[0]);
   4461     Ops.pop_back();
   4462 
   4463     switch (BuiltinID) {
   4464     default: llvm_unreachable("Unsupported ld/lvsl/lvsr intrinsic!");
   4465     case PPC::BI__builtin_altivec_lvx:
   4466       ID = Intrinsic::ppc_altivec_lvx;
   4467       break;
   4468     case PPC::BI__builtin_altivec_lvxl:
   4469       ID = Intrinsic::ppc_altivec_lvxl;
   4470       break;
   4471     case PPC::BI__builtin_altivec_lvebx:
   4472       ID = Intrinsic::ppc_altivec_lvebx;
   4473       break;
   4474     case PPC::BI__builtin_altivec_lvehx:
   4475       ID = Intrinsic::ppc_altivec_lvehx;
   4476       break;
   4477     case PPC::BI__builtin_altivec_lvewx:
   4478       ID = Intrinsic::ppc_altivec_lvewx;
   4479       break;
   4480     case PPC::BI__builtin_altivec_lvsl:
   4481       ID = Intrinsic::ppc_altivec_lvsl;
   4482       break;
   4483     case PPC::BI__builtin_altivec_lvsr:
   4484       ID = Intrinsic::ppc_altivec_lvsr;
   4485       break;
   4486     }
   4487     llvm::Function *F = CGM.getIntrinsic(ID);
   4488     return Builder.CreateCall(F, Ops, "");
   4489   }
   4490 
   4491   // vec_st
   4492   case PPC::BI__builtin_altivec_stvx:
   4493   case PPC::BI__builtin_altivec_stvxl:
   4494   case PPC::BI__builtin_altivec_stvebx:
   4495   case PPC::BI__builtin_altivec_stvehx:
   4496   case PPC::BI__builtin_altivec_stvewx:
   4497   {
   4498     Ops[2] = Builder.CreateBitCast(Ops[2], Int8PtrTy);
   4499     Ops[1] = Builder.CreateGEP(Ops[2], Ops[1]);
   4500     Ops.pop_back();
   4501 
   4502     switch (BuiltinID) {
   4503     default: llvm_unreachable("Unsupported st intrinsic!");
   4504     case PPC::BI__builtin_altivec_stvx:
   4505       ID = Intrinsic::ppc_altivec_stvx;
   4506       break;
   4507     case PPC::BI__builtin_altivec_stvxl:
   4508       ID = Intrinsic::ppc_altivec_stvxl;
   4509       break;
   4510     case PPC::BI__builtin_altivec_stvebx:
   4511       ID = Intrinsic::ppc_altivec_stvebx;
   4512       break;
   4513     case PPC::BI__builtin_altivec_stvehx:
   4514       ID = Intrinsic::ppc_altivec_stvehx;
   4515       break;
   4516     case PPC::BI__builtin_altivec_stvewx:
   4517       ID = Intrinsic::ppc_altivec_stvewx;
   4518       break;
   4519     }
   4520     llvm::Function *F = CGM.getIntrinsic(ID);
   4521     return Builder.CreateCall(F, Ops, "");
   4522   }
   4523   }
   4524 }
   4525