// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -x c++ -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
// expected-no-diagnostics
// REQUIRES: x86-registered-target
#ifndef HEADER
#define HEADER

// CHECK-DAG: [[IDENT_T:%.+]] = type { i32, i32, i32, i32, i8* }
// CHECK-DAG: [[STRUCT_SHAREDS:%.+]] = type { i8*, [2 x [[STRUCT_S:%.+]]]* }
// CHECK-DAG: [[STRUCT_SHAREDS1:%.+]] = type { [2 x [[STRUCT_S:%.+]]]* }
// CHECK-DAG: [[KMP_TASK_T:%.+]] = type { i8*, i32 (i32, i8*)*, i32, %union{{.+}}, %union{{.+}} }
// CHECK-DAG: [[KMP_DEPEND_INFO:%.+]] = type { i64, i64, i8 }
struct S {
  int a;
  S() : a(0) {}
  S(const S &s) : a(s.a) {}
  ~S() {}
};
int a;
// CHECK-LABEL: @main
int main() {
// CHECK: [[B:%.+]] = alloca i8
// CHECK: [[S:%.+]] = alloca [2 x [[STRUCT_S]]]
  char b;
  S s[2];
  int arr[10][a];
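// Task with shared(a, b, s) and priority(b): the addresses of b and s are stored into the
// captured shareds struct, the task is allocated with flags 33 (presumably tied plus the
// priority bit in the runtime's flag encoding), the captures are memcpy'd into the allocated
// task, and the priority value is stored into kmp_task_t before the task is enqueued.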
// CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num([[IDENT_T]]* @{{.+}})
// CHECK: [[B_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store i8* [[B]], i8** [[B_REF]]
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS]], [[STRUCT_SHAREDS]]* [[CAPTURES]], i32 0, i32 1
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 33, i64 40, i64 16, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY1:@.+]] to i32 (i32, i8*)*))
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* [[SHAREDS_REF]], i8* [[BITCAST]], i64 16, i32 8, i1 false)
// CHECK: [[PRIORITY_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR]], i32 0, i32 4
// CHECK: [[PRIORITY:%.+]] = bitcast %union{{.+}}* [[PRIORITY_REF_PTR]] to i32*
// CHECK: store i32 {{.+}}, i32* [[PRIORITY]]
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
#pragma omp task shared(a, b, s) priority(b)
  {
    a = 15;
    b = a;
    s[0].a = 10;
  }
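// Task with depend(in : a, b, s, arr[:]): only s is captured, and four kmp_depend_info
// records are filled in (base address, size in bytes, and flag 1 for an 'in' dependence)
// before the task is submitted through __kmpc_omp_task_with_deps. The size of the arr[:]
// section is the difference between the end and start addresses of the section.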
// CHECK: [[S_REF:%.+]] = getelementptr inbounds [[STRUCT_SHAREDS1]], [[STRUCT_SHAREDS1]]* [[CAPTURES:%.+]], i32 0, i32 0
// CHECK: store [2 x [[STRUCT_S]]]* [[S]], [2 x [[STRUCT_S]]]** [[S_REF]]
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{[^,]+}}, i32 [[GTID]], i32 1, i64 40, i64 8,
// CHECK: [[SHAREDS_REF_PTR:%.+]] = getelementptr inbounds [[KMP_TASK_T]], [[KMP_TASK_T]]* [[TASK_PTR:%.+]], i32 0, i32 0
// CHECK: [[SHAREDS_REF:%.+]] = load i8*, i8** [[SHAREDS_REF_PTR]]
// CHECK: [[BITCAST:%.+]] = bitcast [[STRUCT_SHAREDS1]]* [[CAPTURES]] to i8*
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i64(i8* [[SHAREDS_REF]], i8* [[BITCAST]], i64 8, i32 8, i1 false)
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES:%.*]], i64 0, i64 0
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 4, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 1
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint i8* [[B]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 1, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i64 0, i64 2
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: ptrtoint [2 x [[STRUCT_S]]]* [[S]] to i64
// CHECK: store i64 %{{[^,]+}}, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 1
// CHECK: store i64 8, i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL:%.+]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr i32, i32* [[END]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END1]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: [[DEP:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 3
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* [[DEP]], i32 0, i32 0
// CHECK: [[T1:%.*]] = ptrtoint i32* [[START]] to i64
// CHECK: store i64 [[T1]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64* [[T0]]
// CHECK: [[T0:%.*]] = getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 1, i8* [[T0]]
// CHECK: [[DEPS:%.*]] = getelementptr inbounds [4 x [[KMP_DEPEND_INFO]]], [4 x [[KMP_DEPEND_INFO]]]* [[DEPENDENCIES]], i32 0, i32 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* [[DEPS]] to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 4, i8* %{{[^,]+}}, i32 0, i8* null)
#pragma omp task shared(a, s) depend(in : a, b, s, arr[:])
  {
    a = 15;
    s[1].a = 10;
  }
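// Untied task with no dependences: allocated with flags 0 and submitted with __kmpc_omp_task.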
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY2:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
#pragma omp task untied
  {
    a = 1;
  }
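// Untied task with depend(out : s[0], arr[4:][b]): two dependence records are built, each
// tagged with flag 3, and the size of the arr[4:][b] section is again derived from its
// start and end addresses before the call to __kmpc_omp_task_with_deps.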
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1,
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 0
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 4, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 [[IDX2]]
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[IDX2:%.+]] = sext i8 [[B_VAL]] to i64
// CHECK: [[IDX1:%.+]] = mul nsw i64 9, [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[KMP_DEPEND_INFO]]], [2 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 2, i8* %{{[^,]+}}, i32 0, i8* null)
#pragma omp task untied depend(out : s[0], arr[4:][b])
  {
    a = 1;
  }
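// final(true) task with depend(inout : a, s[1], arr[:a][3:]): allocated with flags 3
// (presumably tied plus the final bit), with three dependence records tagged with flag 3
// and an array-section size computed from the runtime value of the global a.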
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1,
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 0
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: store i64 ptrtoint (i32* @{{.+}} to i64), i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [2 x [[STRUCT_S]]], [2 x [[STRUCT_S]]]* [[S]], i64 0, i64 1
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 1
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint [[STRUCT_S]]* %{{.+}} to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 4, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: [[IDX1:%.+]] = mul nsw i64 0, [[A_VAL]]
// CHECK: [[START:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[START1:%.+]] = getelementptr inbounds i32, i32* [[START]], i64 3
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[IDX2:%.+]] = sub nsw i64 [[NEW_A_VAL_I64]], 1
// CHECK: [[NEW_A_VAL:%.+]] = load i32, i32* @{{.+}},
// CHECK: [[NEW_A_VAL_I64:%.+]] = sext i32 [[NEW_A_VAL]] to i64
// CHECK: [[SUB:%.+]] = add nsw i64 -1, [[NEW_A_VAL_I64]]
// CHECK: [[IDX1:%.+]] = mul nsw i64 [[SUB]], [[A_VAL]]
// CHECK: [[END:%.+]] = getelementptr inbounds i32, i32* %{{.+}}, i64 [[IDX1]]
// CHECK: [[END1:%.+]] = getelementptr inbounds i32, i32* [[END]], i64 [[IDX2]]
// CHECK: [[END2:%.+]] = getelementptr i32, i32* [[END1]], i32 1
// CHECK: [[START_INT:%.+]] = ptrtoint i32* [[START1]] to i64
// CHECK: [[END_INT:%.+]] = ptrtoint i32* [[END2]] to i64
// CHECK: [[SIZEOF:%.+]] = sub nuw i64 [[END_INT]], [[START_INT]]
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i64 0, i64 2
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: ptrtoint i32* [[START1]] to i64
// CHECK: store i64 %{{[^,]+}}, i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 1
// CHECK: store i64 [[SIZEOF]], i64*
// CHECK: getelementptr inbounds [[KMP_DEPEND_INFO]], [[KMP_DEPEND_INFO]]* %{{[^,]+}}, i32 0, i32 2
// CHECK: store i8 3, i8*
// CHECK: getelementptr inbounds [3 x [[KMP_DEPEND_INFO]]], [3 x [[KMP_DEPEND_INFO]]]* %{{[^,]+}}, i32 0, i32 0
// CHECK: bitcast [[KMP_DEPEND_INFO]]* %{{.+}} to i8*
// CHECK: call i32 @__kmpc_omp_task_with_deps([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]], i32 3, i8* %{{[^,]+}}, i32 0, i8* null)
#pragma omp task final(true) depend(inout: a, s[1], arr[:a][3:])
  {
    a = 2;
  }
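// Plain final(true) task: allocated with flags 3.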
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 3, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY3:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
#pragma omp task final(true)
  {
    a = 2;
  }
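// final(flag) with a constant-false flag: the final bit is folded away, leaving flags 1.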
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 1, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY4:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
  const bool flag = false;
#pragma omp task final(flag)
  {
    a = 3;
  }
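// final(b) with a runtime condition and shared(c): the final bit (2) is selected at run
// time from the value of b and OR'd with the tied bit (1); the over-aligned c is the only
// capture, so the shareds size is 8 (apparently a single pointer to c).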
// CHECK: [[B_VAL:%.+]] = load i8, i8* [[B]]
// CHECK: [[CMP:%.+]] = icmp ne i8 [[B_VAL]], 0
// CHECK: [[FINAL:%.+]] = select i1 [[CMP]], i32 2, i32 0
// CHECK: [[FLAGS:%.+]] = or i32 [[FINAL]], 1
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 [[FLAGS]], i64 40, i64 8, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY5:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
  int c __attribute__((aligned(128)));
#pragma omp task final(b) shared(c)
  {
    a = 4;
    c = 5;
  }
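// Untied task containing a nested task, taskyield, and taskwait: allocated with flags 0.
// Its body is outlined into TASK_ENTRY6, which is checked at the end of the file as a
// switch over the task part id with a re-enqueue at each scheduling point.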
// CHECK: [[ORIG_TASK_PTR:%.+]] = call i8* @__kmpc_omp_task_alloc([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i32 0, i64 40, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[KMP_TASK_T]]{{.*}}*)* [[TASK_ENTRY6:@.+]] to i32 (i32, i8*)*))
// CHECK: call i32 @__kmpc_omp_task([[IDENT_T]]* @{{.+}}, i32 [[GTID]], i8* [[ORIG_TASK_PTR]])
#pragma omp task untied
  {
    S s1;
#pragma omp task
    a = 4;
#pragma omp taskyield
    s1 = S();
#pragma omp taskwait
  }
  return a;
}
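// Outlined task entry functions for the tasks created above.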
// CHECK: define internal i32 [[TASK_ENTRY1]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
// CHECK: store i32 15, i32* [[A_PTR:@.+]]
// CHECK: [[A_VAL:%.+]] = load i32, i32* [[A_PTR]]
// CHECK: [[A_VAL_I8:%.+]] = trunc i32 [[A_VAL]] to i8
// CHECK: store i8 [[A_VAL_I8]], i8* %{{.+}}
// CHECK: store i32 10, i32* %{{.+}}

// CHECK: define internal i32 [[TASK_ENTRY2]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
// CHECK: store i32 1, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY3]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
// CHECK: store i32 2, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY4]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
// CHECK: store i32 3, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY5]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
// CHECK: store i32 4, i32* [[A_PTR]]
// CHECK: store i32 5, i32* [[C_PTR:%.+]], align 128

// CHECK: define internal i32
// CHECK: store i32 4, i32* [[A_PTR]]

// CHECK: define internal i32 [[TASK_ENTRY6]](i32, [[KMP_TASK_T]]{{.*}}* noalias)
// CHECK: switch i32 %{{.+}}, label
// CHECK: load i32*, i32** %
// CHECK: store i32 1, i32* %
// CHECK: call i32 @__kmpc_omp_task(%

// CHECK: call i8* @__kmpc_omp_task_alloc(
// CHECK: call i32 @__kmpc_omp_task(%
// CHECK: load i32*, i32** %
// CHECK: store i32 2, i32* %
// CHECK: call i32 @__kmpc_omp_task(%

// CHECK: call i32 @__kmpc_omp_taskyield(%
// CHECK: load i32*, i32** %
// CHECK: store i32 3, i32* %
// CHECK: call i32 @__kmpc_omp_task(%

// CHECK: call i32 @__kmpc_omp_taskwait(%
// CHECK: load i32*, i32** %
// CHECK: store i32 4, i32* %
// CHECK: call i32 @__kmpc_omp_task(%
#endif