// RUN: %clang_cc1 -triple "i686-unknown-unknown"   -emit-llvm -x c %s -o - -O3 | FileCheck %s
// RUN: %clang_cc1 -triple "x86_64-unknown-unknown" -emit-llvm -x c %s -o - -O3 | FileCheck %s
// RUN: %clang_cc1 -triple "x86_64-mingw32"         -emit-llvm -x c %s -o - -O3 | FileCheck %s

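// This file checks that the multiprecision carry builtins
// (__builtin_addc*, __builtin_subc*) lower to a pair of
// llvm.u{add,sub}.with.overflow intrinsics: the two carry bits are OR'ed
// together, zero-extended to the operand width, and stored through the
// carryout pointer.
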
unsigned short test_addcs(unsigned short x, unsigned short y,
                          unsigned short carryin, unsigned short *z) {
  // CHECK: @test_addcs
  // CHECK: %{{.+}} = {{.*}} call { i16, i1 } @llvm.uadd.with.overflow.i16(i16 %x, i16 %y)
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { i16, i1 } @llvm.uadd.with.overflow.i16(i16 %{{.+}}, i16 %carryin)
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to i16
  // CHECK: store i16 %{{.+}}, i16* %z, align 2

  unsigned short carryout;
  *z = __builtin_addcs(x, y, carryin, &carryout);

  return carryout;
}

unsigned test_addc(unsigned x, unsigned y, unsigned carryin, unsigned *z) {
  // CHECK: @test_addc
  // CHECK: %{{.+}} = {{.*}} call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 %x, i32 %y)
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 %{{.+}}, i32 %carryin)
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to i32
  // CHECK: store i32 %{{.+}}, i32* %z, align 4
  unsigned carryout;
  *z = __builtin_addc(x, y, carryin, &carryout);

  return carryout;
}

unsigned long test_addcl(unsigned long x, unsigned long y,
                         unsigned long carryin, unsigned long *z) {
  // long is i32 on i686, i64 on x86_64.
  // CHECK: @test_addcl([[UL:i32|i64]] %x
  // CHECK: %{{.+}} = {{.*}} call { [[UL]], i1 } @llvm.uadd.with.overflow.[[UL]]([[UL]] %x, [[UL]] %y)
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { [[UL]], i1 } @llvm.uadd.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %carryin)
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to [[UL]]
  // CHECK: store [[UL]] %{{.+}}, [[UL]]* %z
  unsigned long carryout;
  *z = __builtin_addcl(x, y, carryin, &carryout);

  return carryout;
}

unsigned long long test_addcll(unsigned long long x, unsigned long long y,
                               unsigned long long carryin,
                               unsigned long long *z) {
  // CHECK: @test_addcll
  // CHECK: %{{.+}} = {{.*}} call { i64, i1 } @llvm.uadd.with.overflow.i64(i64 %x, i64 %y)
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { i64, i1 } @llvm.uadd.with.overflow.i64(i64 %{{.+}}, i64 %carryin)
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to i64
  // CHECK: store i64 %{{.+}}, i64* %z
  unsigned long long carryout;
  *z = __builtin_addcll(x, y, carryin, &carryout);

  return carryout;
}

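// The __builtin_subc* builtins mirror the additions above but lower to
// llvm.usub.with.overflow; the value stored through carryout is the borrow
// out of the chained subtraction.
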
unsigned short test_subcs(unsigned short x, unsigned short y,
                          unsigned short carryin, unsigned short *z) {
  // CHECK: @test_subcs
  // CHECK: %{{.+}} = {{.*}} call { i16, i1 } @llvm.usub.with.overflow.i16(i16 %x, i16 %y)
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { i16, i1 } @llvm.usub.with.overflow.i16(i16 %{{.+}}, i16 %carryin)
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i16, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to i16
  // CHECK: store i16 %{{.+}}, i16* %z, align 2

  unsigned short carryout;
  *z = __builtin_subcs(x, y, carryin, &carryout);

  return carryout;
}

unsigned test_subc(unsigned x, unsigned y, unsigned carryin, unsigned *z) {
  // CHECK: @test_subc
  // CHECK: %{{.+}} = {{.*}} call { i32, i1 } @llvm.usub.with.overflow.i32(i32 %x, i32 %y)
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { i32, i1 } @llvm.usub.with.overflow.i32(i32 %{{.+}}, i32 %carryin)
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i32, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to i32
  // CHECK: store i32 %{{.+}}, i32* %z, align 4
  unsigned carryout;
  *z = __builtin_subc(x, y, carryin, &carryout);

  return carryout;
}

unsigned long test_subcl(unsigned long x, unsigned long y,
                         unsigned long carryin, unsigned long *z) {
  // CHECK: @test_subcl([[UL:i32|i64]] %x
  // CHECK: %{{.+}} = {{.*}} call { [[UL]], i1 } @llvm.usub.with.overflow.[[UL]]([[UL]] %x, [[UL]] %y)
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { [[UL]], i1 } @llvm.usub.with.overflow.[[UL]]([[UL]] %{{.+}}, [[UL]] %carryin)
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { [[UL]], i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to [[UL]]
  // CHECK: store [[UL]] %{{.+}}, [[UL]]* %z
  unsigned long carryout;
  *z = __builtin_subcl(x, y, carryin, &carryout);

  return carryout;
}

unsigned long long test_subcll(unsigned long long x, unsigned long long y,
                               unsigned long long carryin,
                               unsigned long long *z) {
  // CHECK: @test_subcll
  // CHECK: %{{.+}} = {{.*}} call { i64, i1 } @llvm.usub.with.overflow.i64(i64 %x, i64 %y)
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = {{.*}} call { i64, i1 } @llvm.usub.with.overflow.i64(i64 %{{.+}}, i64 %carryin)
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 1
  // CHECK: %{{.+}} = extractvalue { i64, i1 } %{{.+}}, 0
  // CHECK: %{{.+}} = or i1 %{{.+}}, %{{.+}}
  // CHECK: %{{.+}} = zext i1 %{{.+}} to i64
  // CHECK: store i64 %{{.+}}, i64* %z
  unsigned long long carryout;
  *z = __builtin_subcll(x, y, carryin, &carryout);

  return carryout;
}
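
// Illustrative only, with no CHECK lines of its own: a minimal sketch of how
// the carry builtins are typically chained to add two 128-bit values held as
// 64-bit limbs. The u128_t type and add_u128 helper are assumptions made for
// this example, not part of the original test coverage above.
typedef struct {
  unsigned long long lo, hi;
} u128_t;

u128_t add_u128(u128_t a, u128_t b) {
  u128_t r;
  unsigned long long carry;
  // Add the low limbs first; the carry-out feeds the high-limb addition.
  r.lo = __builtin_addcll(a.lo, b.lo, 0, &carry);
  r.hi = __builtin_addcll(a.hi, b.hi, carry, &carry);
  return r;
}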