; LLVM regression test: ThreadSanitizer instrumentation of atomic operations.
; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"

; --- 8-bit atomic loads ------------------------------------------------------
; Each atomic load should be rewritten into a call to __tsan_atomic8_load.
; The trailing i32 argument encodes the memory order, per the CHECK lines:
; unordered/monotonic -> 0, acquire -> 2, seq_cst -> 5.
; NOTE(review): this file uses pre-LLVM-3.x typed-pointer load syntax
; ("load atomic i8* %a"); it will only parse with a matching opt binary.

define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 5)
     36 
; --- 8-bit atomic stores -----------------------------------------------------
; Each atomic store should become a call to __tsan_atomic8_store(addr, val, mo)
; where mo is: unordered/monotonic -> 0, release -> 3, seq_cst -> 5.

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 3)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 5)
     68 
; --- 8-bit atomicrmw, monotonic ordering (mo = 0) ----------------------------
; Each atomicrmw op maps to the corresponding __tsan_atomic8_* runtime call:
; xchg -> exchange, add/sub/and/or/xor/nand -> fetch_<op>.

define void @atomic8_xchg_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xchg_monotonic
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 0)

define void @atomic8_add_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_add_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 0)

define void @atomic8_sub_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_sub_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 0)

define void @atomic8_and_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_and_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 0)

define void @atomic8_or_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_or_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 0)

define void @atomic8_xor_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xor_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 0)

define void @atomic8_nand_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_nand_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 0)
    124 
; --- 8-bit atomicrmw, acquire ordering (mo = 2) ------------------------------

define void @atomic8_xchg_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xchg_acquire
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 2)

define void @atomic8_add_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_add_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 2)

define void @atomic8_sub_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_sub_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 2)

define void @atomic8_and_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_and_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 2)

define void @atomic8_or_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_or_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 2)

define void @atomic8_xor_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xor_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 2)

define void @atomic8_nand_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_nand_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 2)
    180 
; --- 8-bit atomicrmw, release ordering (mo = 3) ------------------------------

define void @atomic8_xchg_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xchg_release
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 3)

define void @atomic8_add_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_add_release
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 3)

define void @atomic8_sub_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_sub_release
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 3)

define void @atomic8_and_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_and_release
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 3)

define void @atomic8_or_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_or_release
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 3)

define void @atomic8_xor_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xor_release
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 3)

define void @atomic8_nand_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_nand_release
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 3)
    236 
; --- 8-bit atomicrmw, acq_rel ordering (mo = 4) ------------------------------

define void @atomic8_xchg_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xchg_acq_rel
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 4)

define void @atomic8_add_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_add_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 4)

define void @atomic8_sub_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_sub_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 4)

define void @atomic8_and_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_and_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 4)

define void @atomic8_or_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_or_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 4)

define void @atomic8_xor_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xor_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 4)

define void @atomic8_nand_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_nand_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 4)
    292 
; --- 8-bit atomicrmw, seq_cst ordering (mo = 5) ------------------------------

define void @atomic8_xchg_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xchg_seq_cst
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 5)

define void @atomic8_add_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_add_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 5)

define void @atomic8_sub_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_sub_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 5)

define void @atomic8_and_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_and_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 5)

define void @atomic8_or_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_or_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 5)

define void @atomic8_xor_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xor_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 5)

define void @atomic8_nand_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_nand_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 5)
    348 
; --- 8-bit cmpxchg -----------------------------------------------------------
; cmpxchg is rewritten to __tsan_atomic8_compare_exchange_val(addr, expected,
; desired, success_mo, failure_mo). The two trailing i32 args encode the
; success and failure orderings respectively (same 0/2/3/4/5 mapping).

define void @atomic8_cas_monotonic(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 monotonic monotonic
  ret void
}
; CHECK: atomic8_cas_monotonic
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 0, i32 0)

define void @atomic8_cas_acquire(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acquire acquire
  ret void
}
; CHECK: atomic8_cas_acquire
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 2, i32 2)

define void @atomic8_cas_release(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release monotonic
  ret void
}
; CHECK: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 0)

define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acq_rel acquire
  ret void
}
; CHECK: atomic8_cas_acq_rel
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 4, i32 2)

define void @atomic8_cas_seq_cst(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic8_cas_seq_cst
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 5, i32 5)
    388 
; --- 16-bit atomic loads (mirrors the 8-bit load section, align 2) -----------

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 2)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 5)
    420 
; --- 16-bit atomic stores ----------------------------------------------------

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 3)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 5)
    452 
; --- 16-bit atomicrmw, monotonic ordering (mo = 0) ---------------------------

define void @atomic16_xchg_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xchg_monotonic
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 0)

define void @atomic16_add_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_add_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 0)

define void @atomic16_sub_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_sub_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 0)

define void @atomic16_and_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_and_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 0)

define void @atomic16_or_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_or_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 0)

define void @atomic16_xor_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xor_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 0)

define void @atomic16_nand_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_nand_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 0)
    508 
; --- 16-bit atomicrmw, acquire ordering (mo = 2) -----------------------------

define void @atomic16_xchg_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xchg_acquire
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 2)

define void @atomic16_add_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_add_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 2)

define void @atomic16_sub_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_sub_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 2)

define void @atomic16_and_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_and_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 2)

define void @atomic16_or_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_or_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 2)

define void @atomic16_xor_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xor_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 2)

define void @atomic16_nand_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_nand_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 2)
    564 
; --- 16-bit atomicrmw, release ordering (mo = 3) -----------------------------

define void @atomic16_xchg_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xchg_release
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 3)

define void @atomic16_add_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_add_release
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 3)

define void @atomic16_sub_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_sub_release
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 3)

define void @atomic16_and_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_and_release
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 3)

define void @atomic16_or_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_or_release
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 3)

define void @atomic16_xor_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xor_release
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 3)

define void @atomic16_nand_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_nand_release
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 3)
    620 
; --- 16-bit atomicrmw, acq_rel ordering (mo = 4) -----------------------------

define void @atomic16_xchg_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xchg_acq_rel
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 4)

define void @atomic16_add_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_add_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 4)

define void @atomic16_sub_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_sub_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 4)

define void @atomic16_and_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_and_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 4)

define void @atomic16_or_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_or_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 4)

define void @atomic16_xor_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xor_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 4)

define void @atomic16_nand_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_nand_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 4)
    676 
; --- 16-bit atomicrmw, seq_cst ordering (mo = 5) -----------------------------

define void @atomic16_xchg_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xchg_seq_cst
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 5)

define void @atomic16_add_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_add_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 5)

define void @atomic16_sub_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_sub_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 5)

define void @atomic16_and_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_and_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 5)

define void @atomic16_or_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_or_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 5)

define void @atomic16_xor_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xor_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 5)

define void @atomic16_nand_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_nand_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 5)
    732 
; --- 16-bit cmpxchg (success_mo, failure_mo as the two trailing i32 args) ----

define void @atomic16_cas_monotonic(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 monotonic monotonic
  ret void
}
; CHECK: atomic16_cas_monotonic
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 0, i32 0)

define void @atomic16_cas_acquire(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acquire acquire
  ret void
}
; CHECK: atomic16_cas_acquire
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 2, i32 2)

define void @atomic16_cas_release(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 release monotonic
  ret void
}
; CHECK: atomic16_cas_release
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 3, i32 0)

define void @atomic16_cas_acq_rel(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acq_rel acquire
  ret void
}
; CHECK: atomic16_cas_acq_rel
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 4, i32 2)

define void @atomic16_cas_seq_cst(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic16_cas_seq_cst
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 5, i32 5)
    772 
; --- 32-bit atomic loads (mirrors the 8/16-bit load sections, align 4) -------

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 4
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 4
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 4
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 2)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 4
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 5)
    804 
; --- 32-bit atomic stores ----------------------------------------------------

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 4
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 4
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 4
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 3)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 4
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 5)
    836 
; --- 32-bit atomicrmw, monotonic ordering (mo = 0) ---------------------------

define void @atomic32_xchg_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xchg_monotonic
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 0)

define void @atomic32_add_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_add_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 0)

define void @atomic32_sub_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_sub_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 0)

define void @atomic32_and_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_and_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 0)

define void @atomic32_or_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_or_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 0)

define void @atomic32_xor_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xor_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 0)

define void @atomic32_nand_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_nand_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 0)
    892 
    893 define void @atomic32_xchg_acquire(i32* %a) nounwind uwtable {
    894 entry:
    895   atomicrmw xchg i32* %a, i32 0 acquire
    896   ret void
    897 }
    898 ; CHECK: atomic32_xchg_acquire
    899 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 2)
    900 
    901 define void @atomic32_add_acquire(i32* %a) nounwind uwtable {
    902 entry:
    903   atomicrmw add i32* %a, i32 0 acquire
    904   ret void
    905 }
    906 ; CHECK: atomic32_add_acquire
    907 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 2)
    908 
    909 define void @atomic32_sub_acquire(i32* %a) nounwind uwtable {
    910 entry:
    911   atomicrmw sub i32* %a, i32 0 acquire
    912   ret void
    913 }
    914 ; CHECK: atomic32_sub_acquire
    915 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 2)
    916 
    917 define void @atomic32_and_acquire(i32* %a) nounwind uwtable {
    918 entry:
    919   atomicrmw and i32* %a, i32 0 acquire
    920   ret void
    921 }
    922 ; CHECK: atomic32_and_acquire
    923 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 2)
    924 
    925 define void @atomic32_or_acquire(i32* %a) nounwind uwtable {
    926 entry:
    927   atomicrmw or i32* %a, i32 0 acquire
    928   ret void
    929 }
    930 ; CHECK: atomic32_or_acquire
    931 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 2)
    932 
    933 define void @atomic32_xor_acquire(i32* %a) nounwind uwtable {
    934 entry:
    935   atomicrmw xor i32* %a, i32 0 acquire
    936   ret void
    937 }
    938 ; CHECK: atomic32_xor_acquire
    939 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 2)
    940 
    941 define void @atomic32_nand_acquire(i32* %a) nounwind uwtable {
    942 entry:
    943   atomicrmw nand i32* %a, i32 0 acquire
    944   ret void
    945 }
    946 ; CHECK: atomic32_nand_acquire
    947 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 2)
    948 
    949 define void @atomic32_xchg_release(i32* %a) nounwind uwtable {
    950 entry:
    951   atomicrmw xchg i32* %a, i32 0 release
    952   ret void
    953 }
    954 ; CHECK: atomic32_xchg_release
    955 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 3)
    956 
    957 define void @atomic32_add_release(i32* %a) nounwind uwtable {
    958 entry:
    959   atomicrmw add i32* %a, i32 0 release
    960   ret void
    961 }
    962 ; CHECK: atomic32_add_release
    963 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 3)
    964 
    965 define void @atomic32_sub_release(i32* %a) nounwind uwtable {
    966 entry:
    967   atomicrmw sub i32* %a, i32 0 release
    968   ret void
    969 }
    970 ; CHECK: atomic32_sub_release
    971 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 3)
    972 
    973 define void @atomic32_and_release(i32* %a) nounwind uwtable {
    974 entry:
    975   atomicrmw and i32* %a, i32 0 release
    976   ret void
    977 }
    978 ; CHECK: atomic32_and_release
    979 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 3)
    980 
    981 define void @atomic32_or_release(i32* %a) nounwind uwtable {
    982 entry:
    983   atomicrmw or i32* %a, i32 0 release
    984   ret void
    985 }
    986 ; CHECK: atomic32_or_release
    987 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 3)
    988 
    989 define void @atomic32_xor_release(i32* %a) nounwind uwtable {
    990 entry:
    991   atomicrmw xor i32* %a, i32 0 release
    992   ret void
    993 }
    994 ; CHECK: atomic32_xor_release
    995 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 3)
    996 
    997 define void @atomic32_nand_release(i32* %a) nounwind uwtable {
    998 entry:
    999   atomicrmw nand i32* %a, i32 0 release
   1000   ret void
   1001 }
   1002 ; CHECK: atomic32_nand_release
   1003 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 3)
   1004 
   1005 define void @atomic32_xchg_acq_rel(i32* %a) nounwind uwtable {
   1006 entry:
   1007   atomicrmw xchg i32* %a, i32 0 acq_rel
   1008   ret void
   1009 }
   1010 ; CHECK: atomic32_xchg_acq_rel
   1011 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 4)
   1012 
   1013 define void @atomic32_add_acq_rel(i32* %a) nounwind uwtable {
   1014 entry:
   1015   atomicrmw add i32* %a, i32 0 acq_rel
   1016   ret void
   1017 }
   1018 ; CHECK: atomic32_add_acq_rel
   1019 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 4)
   1020 
   1021 define void @atomic32_sub_acq_rel(i32* %a) nounwind uwtable {
   1022 entry:
   1023   atomicrmw sub i32* %a, i32 0 acq_rel
   1024   ret void
   1025 }
   1026 ; CHECK: atomic32_sub_acq_rel
   1027 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 4)
   1028 
   1029 define void @atomic32_and_acq_rel(i32* %a) nounwind uwtable {
   1030 entry:
   1031   atomicrmw and i32* %a, i32 0 acq_rel
   1032   ret void
   1033 }
   1034 ; CHECK: atomic32_and_acq_rel
   1035 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 4)
   1036 
   1037 define void @atomic32_or_acq_rel(i32* %a) nounwind uwtable {
   1038 entry:
   1039   atomicrmw or i32* %a, i32 0 acq_rel
   1040   ret void
   1041 }
   1042 ; CHECK: atomic32_or_acq_rel
   1043 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 4)
   1044 
   1045 define void @atomic32_xor_acq_rel(i32* %a) nounwind uwtable {
   1046 entry:
   1047   atomicrmw xor i32* %a, i32 0 acq_rel
   1048   ret void
   1049 }
   1050 ; CHECK: atomic32_xor_acq_rel
   1051 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 4)
   1052 
   1053 define void @atomic32_nand_acq_rel(i32* %a) nounwind uwtable {
   1054 entry:
   1055   atomicrmw nand i32* %a, i32 0 acq_rel
   1056   ret void
   1057 }
   1058 ; CHECK: atomic32_nand_acq_rel
   1059 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 4)
   1060 
   1061 define void @atomic32_xchg_seq_cst(i32* %a) nounwind uwtable {
   1062 entry:
   1063   atomicrmw xchg i32* %a, i32 0 seq_cst
   1064   ret void
   1065 }
   1066 ; CHECK: atomic32_xchg_seq_cst
   1067 ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 5)
   1068 
   1069 define void @atomic32_add_seq_cst(i32* %a) nounwind uwtable {
   1070 entry:
   1071   atomicrmw add i32* %a, i32 0 seq_cst
   1072   ret void
   1073 }
   1074 ; CHECK: atomic32_add_seq_cst
   1075 ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 5)
   1076 
   1077 define void @atomic32_sub_seq_cst(i32* %a) nounwind uwtable {
   1078 entry:
   1079   atomicrmw sub i32* %a, i32 0 seq_cst
   1080   ret void
   1081 }
   1082 ; CHECK: atomic32_sub_seq_cst
   1083 ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 5)
   1084 
   1085 define void @atomic32_and_seq_cst(i32* %a) nounwind uwtable {
   1086 entry:
   1087   atomicrmw and i32* %a, i32 0 seq_cst
   1088   ret void
   1089 }
   1090 ; CHECK: atomic32_and_seq_cst
   1091 ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 5)
   1092 
   1093 define void @atomic32_or_seq_cst(i32* %a) nounwind uwtable {
   1094 entry:
   1095   atomicrmw or i32* %a, i32 0 seq_cst
   1096   ret void
   1097 }
   1098 ; CHECK: atomic32_or_seq_cst
   1099 ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 5)
   1100 
   1101 define void @atomic32_xor_seq_cst(i32* %a) nounwind uwtable {
   1102 entry:
   1103   atomicrmw xor i32* %a, i32 0 seq_cst
   1104   ret void
   1105 }
   1106 ; CHECK: atomic32_xor_seq_cst
   1107 ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 5)
   1108 
   1109 define void @atomic32_nand_seq_cst(i32* %a) nounwind uwtable {
   1110 entry:
   1111   atomicrmw nand i32* %a, i32 0 seq_cst
   1112   ret void
   1113 }
   1114 ; CHECK: atomic32_nand_seq_cst
   1115 ; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 5)
   1116 
   1117 define void @atomic32_cas_monotonic(i32* %a) nounwind uwtable {
   1118 entry:
   1119   cmpxchg i32* %a, i32 0, i32 1 monotonic monotonic
   1120   ret void
   1121 }
   1122 ; CHECK: atomic32_cas_monotonic
   1123 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 0, i32 0)
   1124 
   1125 define void @atomic32_cas_acquire(i32* %a) nounwind uwtable {
   1126 entry:
   1127   cmpxchg i32* %a, i32 0, i32 1 acquire acquire
   1128   ret void
   1129 }
   1130 ; CHECK: atomic32_cas_acquire
   1131 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 2, i32 2)
   1132 
   1133 define void @atomic32_cas_release(i32* %a) nounwind uwtable {
   1134 entry:
   1135   cmpxchg i32* %a, i32 0, i32 1 release monotonic
   1136   ret void
   1137 }
   1138 ; CHECK: atomic32_cas_release
   1139 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 3, i32 0)
   1140 
   1141 define void @atomic32_cas_acq_rel(i32* %a) nounwind uwtable {
   1142 entry:
   1143   cmpxchg i32* %a, i32 0, i32 1 acq_rel acquire
   1144   ret void
   1145 }
   1146 ; CHECK: atomic32_cas_acq_rel
   1147 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 4, i32 2)
   1148 
   1149 define void @atomic32_cas_seq_cst(i32* %a) nounwind uwtable {
   1150 entry:
   1151   cmpxchg i32* %a, i32 0, i32 1 seq_cst seq_cst
   1152   ret void
   1153 }
   1154 ; CHECK: atomic32_cas_seq_cst
   1155 ; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 5, i32 5)
   1156 
   1157 define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
   1158 entry:
   1159   %0 = load atomic i64* %a unordered, align 8
   1160   ret i64 %0
   1161 }
   1162 ; CHECK: atomic64_load_unordered
   1163 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)
   1164 
   1165 define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
   1166 entry:
   1167   %0 = load atomic i64* %a monotonic, align 8
   1168   ret i64 %0
   1169 }
   1170 ; CHECK: atomic64_load_monotonic
   1171 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)
   1172 
   1173 define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
   1174 entry:
   1175   %0 = load atomic i64* %a acquire, align 8
   1176   ret i64 %0
   1177 }
   1178 ; CHECK: atomic64_load_acquire
   1179 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 2)
   1180 
   1181 define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
   1182 entry:
   1183   %0 = load atomic i64* %a seq_cst, align 8
   1184   ret i64 %0
   1185 }
   1186 ; CHECK: atomic64_load_seq_cst
   1187 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5)
   1188 
   1189 define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
   1190 entry:
   1191   store atomic i64 0, i64* %a unordered, align 8
   1192   ret void
   1193 }
   1194 ; CHECK: atomic64_store_unordered
   1195 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)
   1196 
   1197 define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
   1198 entry:
   1199   store atomic i64 0, i64* %a monotonic, align 8
   1200   ret void
   1201 }
   1202 ; CHECK: atomic64_store_monotonic
   1203 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)
   1204 
   1205 define void @atomic64_store_release(i64* %a) nounwind uwtable {
   1206 entry:
   1207   store atomic i64 0, i64* %a release, align 8
   1208   ret void
   1209 }
   1210 ; CHECK: atomic64_store_release
   1211 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 3)
   1212 
   1213 define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
   1214 entry:
   1215   store atomic i64 0, i64* %a seq_cst, align 8
   1216   ret void
   1217 }
   1218 ; CHECK: atomic64_store_seq_cst
   1219 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5)
   1220 
   1221 define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
   1222 entry:
   1223   atomicrmw xchg i64* %a, i64 0 monotonic
   1224   ret void
   1225 }
   1226 ; CHECK: atomic64_xchg_monotonic
   1227 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 0)
   1228 
   1229 define void @atomic64_add_monotonic(i64* %a) nounwind uwtable {
   1230 entry:
   1231   atomicrmw add i64* %a, i64 0 monotonic
   1232   ret void
   1233 }
   1234 ; CHECK: atomic64_add_monotonic
   1235 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 0)
   1236 
   1237 define void @atomic64_sub_monotonic(i64* %a) nounwind uwtable {
   1238 entry:
   1239   atomicrmw sub i64* %a, i64 0 monotonic
   1240   ret void
   1241 }
   1242 ; CHECK: atomic64_sub_monotonic
   1243 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 0)
   1244 
   1245 define void @atomic64_and_monotonic(i64* %a) nounwind uwtable {
   1246 entry:
   1247   atomicrmw and i64* %a, i64 0 monotonic
   1248   ret void
   1249 }
   1250 ; CHECK: atomic64_and_monotonic
   1251 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 0)
   1252 
   1253 define void @atomic64_or_monotonic(i64* %a) nounwind uwtable {
   1254 entry:
   1255   atomicrmw or i64* %a, i64 0 monotonic
   1256   ret void
   1257 }
   1258 ; CHECK: atomic64_or_monotonic
   1259 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 0)
   1260 
   1261 define void @atomic64_xor_monotonic(i64* %a) nounwind uwtable {
   1262 entry:
   1263   atomicrmw xor i64* %a, i64 0 monotonic
   1264   ret void
   1265 }
   1266 ; CHECK: atomic64_xor_monotonic
   1267 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 0)
   1268 
   1269 define void @atomic64_nand_monotonic(i64* %a) nounwind uwtable {
   1270 entry:
   1271   atomicrmw nand i64* %a, i64 0 monotonic
   1272   ret void
   1273 }
   1274 ; CHECK: atomic64_nand_monotonic
   1275 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 0)
   1276 
   1277 define void @atomic64_xchg_acquire(i64* %a) nounwind uwtable {
   1278 entry:
   1279   atomicrmw xchg i64* %a, i64 0 acquire
   1280   ret void
   1281 }
   1282 ; CHECK: atomic64_xchg_acquire
   1283 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 2)
   1284 
   1285 define void @atomic64_add_acquire(i64* %a) nounwind uwtable {
   1286 entry:
   1287   atomicrmw add i64* %a, i64 0 acquire
   1288   ret void
   1289 }
   1290 ; CHECK: atomic64_add_acquire
   1291 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 2)
   1292 
   1293 define void @atomic64_sub_acquire(i64* %a) nounwind uwtable {
   1294 entry:
   1295   atomicrmw sub i64* %a, i64 0 acquire
   1296   ret void
   1297 }
   1298 ; CHECK: atomic64_sub_acquire
   1299 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 2)
   1300 
   1301 define void @atomic64_and_acquire(i64* %a) nounwind uwtable {
   1302 entry:
   1303   atomicrmw and i64* %a, i64 0 acquire
   1304   ret void
   1305 }
   1306 ; CHECK: atomic64_and_acquire
   1307 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 2)
   1308 
   1309 define void @atomic64_or_acquire(i64* %a) nounwind uwtable {
   1310 entry:
   1311   atomicrmw or i64* %a, i64 0 acquire
   1312   ret void
   1313 }
   1314 ; CHECK: atomic64_or_acquire
   1315 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 2)
   1316 
   1317 define void @atomic64_xor_acquire(i64* %a) nounwind uwtable {
   1318 entry:
   1319   atomicrmw xor i64* %a, i64 0 acquire
   1320   ret void
   1321 }
   1322 ; CHECK: atomic64_xor_acquire
   1323 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 2)
   1324 
   1325 define void @atomic64_nand_acquire(i64* %a) nounwind uwtable {
   1326 entry:
   1327   atomicrmw nand i64* %a, i64 0 acquire
   1328   ret void
   1329 }
   1330 ; CHECK: atomic64_nand_acquire
   1331 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 2)
   1332 
   1333 define void @atomic64_xchg_release(i64* %a) nounwind uwtable {
   1334 entry:
   1335   atomicrmw xchg i64* %a, i64 0 release
   1336   ret void
   1337 }
   1338 ; CHECK: atomic64_xchg_release
   1339 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 3)
   1340 
   1341 define void @atomic64_add_release(i64* %a) nounwind uwtable {
   1342 entry:
   1343   atomicrmw add i64* %a, i64 0 release
   1344   ret void
   1345 }
   1346 ; CHECK: atomic64_add_release
   1347 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 3)
   1348 
   1349 define void @atomic64_sub_release(i64* %a) nounwind uwtable {
   1350 entry:
   1351   atomicrmw sub i64* %a, i64 0 release
   1352   ret void
   1353 }
   1354 ; CHECK: atomic64_sub_release
   1355 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 3)
   1356 
   1357 define void @atomic64_and_release(i64* %a) nounwind uwtable {
   1358 entry:
   1359   atomicrmw and i64* %a, i64 0 release
   1360   ret void
   1361 }
   1362 ; CHECK: atomic64_and_release
   1363 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 3)
   1364 
   1365 define void @atomic64_or_release(i64* %a) nounwind uwtable {
   1366 entry:
   1367   atomicrmw or i64* %a, i64 0 release
   1368   ret void
   1369 }
   1370 ; CHECK: atomic64_or_release
   1371 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 3)
   1372 
   1373 define void @atomic64_xor_release(i64* %a) nounwind uwtable {
   1374 entry:
   1375   atomicrmw xor i64* %a, i64 0 release
   1376   ret void
   1377 }
   1378 ; CHECK: atomic64_xor_release
   1379 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 3)
   1380 
   1381 define void @atomic64_nand_release(i64* %a) nounwind uwtable {
   1382 entry:
   1383   atomicrmw nand i64* %a, i64 0 release
   1384   ret void
   1385 }
   1386 ; CHECK: atomic64_nand_release
   1387 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 3)
   1388 
   1389 define void @atomic64_xchg_acq_rel(i64* %a) nounwind uwtable {
   1390 entry:
   1391   atomicrmw xchg i64* %a, i64 0 acq_rel
   1392   ret void
   1393 }
   1394 ; CHECK: atomic64_xchg_acq_rel
   1395 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 4)
   1396 
   1397 define void @atomic64_add_acq_rel(i64* %a) nounwind uwtable {
   1398 entry:
   1399   atomicrmw add i64* %a, i64 0 acq_rel
   1400   ret void
   1401 }
   1402 ; CHECK: atomic64_add_acq_rel
   1403 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 4)
   1404 
   1405 define void @atomic64_sub_acq_rel(i64* %a) nounwind uwtable {
   1406 entry:
   1407   atomicrmw sub i64* %a, i64 0 acq_rel
   1408   ret void
   1409 }
   1410 ; CHECK: atomic64_sub_acq_rel
   1411 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 4)
   1412 
   1413 define void @atomic64_and_acq_rel(i64* %a) nounwind uwtable {
   1414 entry:
   1415   atomicrmw and i64* %a, i64 0 acq_rel
   1416   ret void
   1417 }
   1418 ; CHECK: atomic64_and_acq_rel
   1419 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 4)
   1420 
   1421 define void @atomic64_or_acq_rel(i64* %a) nounwind uwtable {
   1422 entry:
   1423   atomicrmw or i64* %a, i64 0 acq_rel
   1424   ret void
   1425 }
   1426 ; CHECK: atomic64_or_acq_rel
   1427 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 4)
   1428 
   1429 define void @atomic64_xor_acq_rel(i64* %a) nounwind uwtable {
   1430 entry:
   1431   atomicrmw xor i64* %a, i64 0 acq_rel
   1432   ret void
   1433 }
   1434 ; CHECK: atomic64_xor_acq_rel
   1435 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 4)
   1436 
   1437 define void @atomic64_nand_acq_rel(i64* %a) nounwind uwtable {
   1438 entry:
   1439   atomicrmw nand i64* %a, i64 0 acq_rel
   1440   ret void
   1441 }
   1442 ; CHECK: atomic64_nand_acq_rel
   1443 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 4)
   1444 
   1445 define void @atomic64_xchg_seq_cst(i64* %a) nounwind uwtable {
   1446 entry:
   1447   atomicrmw xchg i64* %a, i64 0 seq_cst
   1448   ret void
   1449 }
   1450 ; CHECK: atomic64_xchg_seq_cst
   1451 ; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 5)
   1452 
   1453 define void @atomic64_add_seq_cst(i64* %a) nounwind uwtable {
   1454 entry:
   1455   atomicrmw add i64* %a, i64 0 seq_cst
   1456   ret void
   1457 }
   1458 ; CHECK: atomic64_add_seq_cst
   1459 ; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 5)
   1460 
   1461 define void @atomic64_sub_seq_cst(i64* %a) nounwind uwtable {
   1462 entry:
   1463   atomicrmw sub i64* %a, i64 0 seq_cst
   1464   ret void
   1465 }
   1466 ; CHECK: atomic64_sub_seq_cst
   1467 ; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 5)
   1468 
   1469 define void @atomic64_and_seq_cst(i64* %a) nounwind uwtable {
   1470 entry:
   1471   atomicrmw and i64* %a, i64 0 seq_cst
   1472   ret void
   1473 }
   1474 ; CHECK: atomic64_and_seq_cst
   1475 ; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 5)
   1476 
   1477 define void @atomic64_or_seq_cst(i64* %a) nounwind uwtable {
   1478 entry:
   1479   atomicrmw or i64* %a, i64 0 seq_cst
   1480   ret void
   1481 }
   1482 ; CHECK: atomic64_or_seq_cst
   1483 ; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 5)
   1484 
   1485 define void @atomic64_xor_seq_cst(i64* %a) nounwind uwtable {
   1486 entry:
   1487   atomicrmw xor i64* %a, i64 0 seq_cst
   1488   ret void
   1489 }
   1490 ; CHECK: atomic64_xor_seq_cst
   1491 ; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 5)
   1492 
   1493 define void @atomic64_nand_seq_cst(i64* %a) nounwind uwtable {
   1494 entry:
   1495   atomicrmw nand i64* %a, i64 0 seq_cst
   1496   ret void
   1497 }
   1498 ; CHECK: atomic64_nand_seq_cst
   1499 ; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 5)
   1500 
   1501 define void @atomic64_cas_monotonic(i64* %a) nounwind uwtable {
   1502 entry:
   1503   cmpxchg i64* %a, i64 0, i64 1 monotonic monotonic
   1504   ret void
   1505 }
   1506 ; CHECK: atomic64_cas_monotonic
   1507 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 0, i32 0)
   1508 
   1509 define void @atomic64_cas_acquire(i64* %a) nounwind uwtable {
   1510 entry:
   1511   cmpxchg i64* %a, i64 0, i64 1 acquire acquire
   1512   ret void
   1513 }
   1514 ; CHECK: atomic64_cas_acquire
   1515 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 2, i32 2)
   1516 
   1517 define void @atomic64_cas_release(i64* %a) nounwind uwtable {
   1518 entry:
   1519   cmpxchg i64* %a, i64 0, i64 1 release monotonic
   1520   ret void
   1521 }
   1522 ; CHECK: atomic64_cas_release
   1523 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 3, i32 0)
   1524 
   1525 define void @atomic64_cas_acq_rel(i64* %a) nounwind uwtable {
   1526 entry:
   1527   cmpxchg i64* %a, i64 0, i64 1 acq_rel acquire
   1528   ret void
   1529 }
   1530 ; CHECK: atomic64_cas_acq_rel
   1531 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 4, i32 2)
   1532 
   1533 define void @atomic64_cas_seq_cst(i64* %a) nounwind uwtable {
   1534 entry:
   1535   cmpxchg i64* %a, i64 0, i64 1 seq_cst seq_cst
   1536   ret void
   1537 }
   1538 ; CHECK: atomic64_cas_seq_cst
   1539 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5, i32 5)
   1540 
   1541 define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
   1542 entry:
   1543   %0 = load atomic i128* %a unordered, align 16
   1544   ret i128 %0
   1545 }
   1546 ; CHECK: atomic128_load_unordered
   1547 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)
   1548 
   1549 define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
   1550 entry:
   1551   %0 = load atomic i128* %a monotonic, align 16
   1552   ret i128 %0
   1553 }
   1554 ; CHECK: atomic128_load_monotonic
   1555 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)
   1556 
   1557 define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
   1558 entry:
   1559   %0 = load atomic i128* %a acquire, align 16
   1560   ret i128 %0
   1561 }
   1562 ; CHECK: atomic128_load_acquire
   1563 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 2)
   1564 
   1565 define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
   1566 entry:
   1567   %0 = load atomic i128* %a seq_cst, align 16
   1568   ret i128 %0
   1569 }
   1570 ; CHECK: atomic128_load_seq_cst
   1571 ; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 5)
   1572 
   1573 define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
   1574 entry:
   1575   store atomic i128 0, i128* %a unordered, align 16
   1576   ret void
   1577 }
   1578 ; CHECK: atomic128_store_unordered
   1579 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)
   1580 
   1581 define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
   1582 entry:
   1583   store atomic i128 0, i128* %a monotonic, align 16
   1584   ret void
   1585 }
   1586 ; CHECK: atomic128_store_monotonic
   1587 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)
   1588 
   1589 define void @atomic128_store_release(i128* %a) nounwind uwtable {
   1590 entry:
   1591   store atomic i128 0, i128* %a release, align 16
   1592   ret void
   1593 }
   1594 ; CHECK: atomic128_store_release
   1595 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 3)
   1596 
   1597 define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
   1598 entry:
   1599   store atomic i128 0, i128* %a seq_cst, align 16
   1600   ret void
   1601 }
   1602 ; CHECK: atomic128_store_seq_cst
   1603 ; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 5)
   1604 
   1605 define void @atomic128_xchg_monotonic(i128* %a) nounwind uwtable {
   1606 entry:
   1607   atomicrmw xchg i128* %a, i128 0 monotonic
   1608   ret void
   1609 }
   1610 ; CHECK: atomic128_xchg_monotonic
   1611 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 0)
   1612 
   1613 define void @atomic128_add_monotonic(i128* %a) nounwind uwtable {
   1614 entry:
   1615   atomicrmw add i128* %a, i128 0 monotonic
   1616   ret void
   1617 }
   1618 ; CHECK: atomic128_add_monotonic
   1619 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 0)
   1620 
   1621 define void @atomic128_sub_monotonic(i128* %a) nounwind uwtable {
   1622 entry:
   1623   atomicrmw sub i128* %a, i128 0 monotonic
   1624   ret void
   1625 }
   1626 ; CHECK: atomic128_sub_monotonic
   1627 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 0)
   1628 
   1629 define void @atomic128_and_monotonic(i128* %a) nounwind uwtable {
   1630 entry:
   1631   atomicrmw and i128* %a, i128 0 monotonic
   1632   ret void
   1633 }
   1634 ; CHECK: atomic128_and_monotonic
   1635 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 0)
   1636 
   1637 define void @atomic128_or_monotonic(i128* %a) nounwind uwtable {
   1638 entry:
   1639   atomicrmw or i128* %a, i128 0 monotonic
   1640   ret void
   1641 }
   1642 ; CHECK: atomic128_or_monotonic
   1643 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 0)
   1644 
   1645 define void @atomic128_xor_monotonic(i128* %a) nounwind uwtable {
   1646 entry:
   1647   atomicrmw xor i128* %a, i128 0 monotonic
   1648   ret void
   1649 }
   1650 ; CHECK: atomic128_xor_monotonic
   1651 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 0)
   1652 
   1653 define void @atomic128_nand_monotonic(i128* %a) nounwind uwtable {
   1654 entry:
   1655   atomicrmw nand i128* %a, i128 0 monotonic
   1656   ret void
   1657 }
   1658 ; CHECK: atomic128_nand_monotonic
   1659 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 0)
   1660 
   1661 define void @atomic128_xchg_acquire(i128* %a) nounwind uwtable {
   1662 entry:
   1663   atomicrmw xchg i128* %a, i128 0 acquire
   1664   ret void
   1665 }
   1666 ; CHECK: atomic128_xchg_acquire
   1667 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 2)
   1668 
   1669 define void @atomic128_add_acquire(i128* %a) nounwind uwtable {
   1670 entry:
   1671   atomicrmw add i128* %a, i128 0 acquire
   1672   ret void
   1673 }
   1674 ; CHECK: atomic128_add_acquire
   1675 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 2)
   1676 
   1677 define void @atomic128_sub_acquire(i128* %a) nounwind uwtable {
   1678 entry:
   1679   atomicrmw sub i128* %a, i128 0 acquire
   1680   ret void
   1681 }
   1682 ; CHECK: atomic128_sub_acquire
   1683 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 2)
   1684 
   1685 define void @atomic128_and_acquire(i128* %a) nounwind uwtable {
   1686 entry:
   1687   atomicrmw and i128* %a, i128 0 acquire
   1688   ret void
   1689 }
   1690 ; CHECK: atomic128_and_acquire
   1691 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 2)
   1692 
   1693 define void @atomic128_or_acquire(i128* %a) nounwind uwtable {
   1694 entry:
   1695   atomicrmw or i128* %a, i128 0 acquire
   1696   ret void
   1697 }
   1698 ; CHECK: atomic128_or_acquire
   1699 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 2)
   1700 
   1701 define void @atomic128_xor_acquire(i128* %a) nounwind uwtable {
   1702 entry:
   1703   atomicrmw xor i128* %a, i128 0 acquire
   1704   ret void
   1705 }
   1706 ; CHECK: atomic128_xor_acquire
   1707 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 2)
   1708 
   1709 define void @atomic128_nand_acquire(i128* %a) nounwind uwtable {
   1710 entry:
   1711   atomicrmw nand i128* %a, i128 0 acquire
   1712   ret void
   1713 }
   1714 ; CHECK: atomic128_nand_acquire
   1715 ; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 2)
   1716 
   1717 define void @atomic128_xchg_release(i128* %a) nounwind uwtable {
   1718 entry:
   1719   atomicrmw xchg i128* %a, i128 0 release
   1720   ret void
   1721 }
   1722 ; CHECK: atomic128_xchg_release
   1723 ; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 3)
   1724 
   1725 define void @atomic128_add_release(i128* %a) nounwind uwtable {
   1726 entry:
   1727   atomicrmw add i128* %a, i128 0 release
   1728   ret void
   1729 }
   1730 ; CHECK: atomic128_add_release
   1731 ; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 3)
   1732 
   1733 define void @atomic128_sub_release(i128* %a) nounwind uwtable {
   1734 entry:
   1735   atomicrmw sub i128* %a, i128 0 release
   1736   ret void
   1737 }
   1738 ; CHECK: atomic128_sub_release
   1739 ; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 3)
   1740 
   1741 define void @atomic128_and_release(i128* %a) nounwind uwtable {
   1742 entry:
   1743   atomicrmw and i128* %a, i128 0 release
   1744   ret void
   1745 }
   1746 ; CHECK: atomic128_and_release
   1747 ; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 3)
   1748 
   1749 define void @atomic128_or_release(i128* %a) nounwind uwtable {
   1750 entry:
   1751   atomicrmw or i128* %a, i128 0 release
   1752   ret void
   1753 }
   1754 ; CHECK: atomic128_or_release
   1755 ; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 3)
   1756 
   1757 define void @atomic128_xor_release(i128* %a) nounwind uwtable {
   1758 entry:
   1759   atomicrmw xor i128* %a, i128 0 release
   1760   ret void
   1761 }
   1762 ; CHECK: atomic128_xor_release
   1763 ; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 3)
   1764 
; Release-ordered i128 atomicrmw nand must lower to __tsan_atomic128_fetch_nand
; with memory-order argument 3 (release).
define void @atomic128_nand_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_nand_release
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 3)
   1772 
; Acq_rel-ordered i128 exchange must lower to __tsan_atomic128_exchange
; with memory-order argument 4 (acq_rel).
define void @atomic128_xchg_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xchg_acq_rel
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 4)
   1780 
; Acq_rel-ordered i128 atomicrmw add must lower to __tsan_atomic128_fetch_add
; with memory-order argument 4 (acq_rel).
define void @atomic128_add_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_add_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 4)
   1788 
; Acq_rel-ordered i128 atomicrmw sub must lower to __tsan_atomic128_fetch_sub
; with memory-order argument 4 (acq_rel).
define void @atomic128_sub_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_sub_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 4)
   1796 
; Acq_rel-ordered i128 atomicrmw and must lower to __tsan_atomic128_fetch_and
; with memory-order argument 4 (acq_rel).
define void @atomic128_and_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_and_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 4)
   1804 
; Acq_rel-ordered i128 atomicrmw or must lower to __tsan_atomic128_fetch_or
; with memory-order argument 4 (acq_rel).
define void @atomic128_or_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_or_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 4)
   1812 
; Acq_rel-ordered i128 atomicrmw xor must lower to __tsan_atomic128_fetch_xor
; with memory-order argument 4 (acq_rel).
define void @atomic128_xor_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xor_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 4)
   1820 
; Acq_rel-ordered i128 atomicrmw nand must lower to __tsan_atomic128_fetch_nand
; with memory-order argument 4 (acq_rel).
define void @atomic128_nand_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_nand_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 4)
   1828 
; Seq_cst-ordered i128 exchange must lower to __tsan_atomic128_exchange
; with memory-order argument 5 (seq_cst).
define void @atomic128_xchg_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xchg_seq_cst
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 5)
   1836 
; Seq_cst-ordered i128 atomicrmw add must lower to __tsan_atomic128_fetch_add
; with memory-order argument 5 (seq_cst).
define void @atomic128_add_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_add_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 5)
   1844 
; Seq_cst-ordered i128 atomicrmw sub must lower to __tsan_atomic128_fetch_sub
; with memory-order argument 5 (seq_cst).
define void @atomic128_sub_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_sub_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 5)
   1852 
; Seq_cst-ordered i128 atomicrmw and must lower to __tsan_atomic128_fetch_and
; with memory-order argument 5 (seq_cst).
define void @atomic128_and_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_and_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 5)
   1860 
; Seq_cst-ordered i128 atomicrmw or must lower to __tsan_atomic128_fetch_or
; with memory-order argument 5 (seq_cst).
define void @atomic128_or_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_or_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 5)
   1868 
; Seq_cst-ordered i128 atomicrmw xor must lower to __tsan_atomic128_fetch_xor
; with memory-order argument 5 (seq_cst).
define void @atomic128_xor_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xor_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 5)
   1876 
; Seq_cst-ordered i128 atomicrmw nand must lower to __tsan_atomic128_fetch_nand
; with memory-order argument 5 (seq_cst).
define void @atomic128_nand_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_nand_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 5)
   1884 
; Monotonic i128 cmpxchg must lower to __tsan_atomic128_compare_exchange_val
; with success/failure memory-order arguments 0/0 (relaxed/relaxed).
define void @atomic128_cas_monotonic(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 monotonic monotonic
  ret void
}
; CHECK: atomic128_cas_monotonic
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 0, i32 0)
   1892 
; Acquire i128 cmpxchg must lower to __tsan_atomic128_compare_exchange_val
; with success/failure memory-order arguments 2/2 (acquire/acquire).
define void @atomic128_cas_acquire(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acquire acquire
  ret void
}
; CHECK: atomic128_cas_acquire
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 2, i32 2)
   1900 
; Release i128 cmpxchg (failure order monotonic) must lower to
; __tsan_atomic128_compare_exchange_val with orders 3/0 (release/relaxed).
define void @atomic128_cas_release(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 release monotonic
  ret void
}
; CHECK: atomic128_cas_release
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 3, i32 0)
   1908 
; Acq_rel i128 cmpxchg (failure order acquire) must lower to
; __tsan_atomic128_compare_exchange_val with orders 4/2 (acq_rel/acquire).
define void @atomic128_cas_acq_rel(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acq_rel acquire
  ret void
}
; CHECK: atomic128_cas_acq_rel
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 4, i32 2)
   1916 
; Seq_cst i128 cmpxchg must lower to __tsan_atomic128_compare_exchange_val
; with success/failure memory-order arguments 5/5 (seq_cst/seq_cst).
define void @atomic128_cas_seq_cst(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic128_cas_seq_cst
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 5, i32 5)
   1924 
; A singlethread (signal) fence with acquire ordering must lower to
; __tsan_atomic_signal_fence with memory-order argument 2 (acquire).
define void @atomic_signal_fence_acquire() nounwind uwtable {
entry:
  fence singlethread acquire
  ret void
}
; CHECK: atomic_signal_fence_acquire
; CHECK: call void @__tsan_atomic_signal_fence(i32 2)
   1932 
; A cross-thread fence with acquire ordering must lower to
; __tsan_atomic_thread_fence with memory-order argument 2 (acquire).
define void @atomic_thread_fence_acquire() nounwind uwtable {
entry:
  fence  acquire
  ret void
}
; CHECK: atomic_thread_fence_acquire
; CHECK: call void @__tsan_atomic_thread_fence(i32 2)
   1940 
; A singlethread (signal) fence with release ordering must lower to
; __tsan_atomic_signal_fence with memory-order argument 3 (release).
define void @atomic_signal_fence_release() nounwind uwtable {
entry:
  fence singlethread release
  ret void
}
; CHECK: atomic_signal_fence_release
; CHECK: call void @__tsan_atomic_signal_fence(i32 3)
   1948 
; A cross-thread fence with release ordering must lower to
; __tsan_atomic_thread_fence with memory-order argument 3 (release).
define void @atomic_thread_fence_release() nounwind uwtable {
entry:
  fence  release
  ret void
}
; CHECK: atomic_thread_fence_release
; CHECK: call void @__tsan_atomic_thread_fence(i32 3)
   1956 
; A singlethread (signal) fence with acq_rel ordering must lower to
; __tsan_atomic_signal_fence with memory-order argument 4 (acq_rel).
define void @atomic_signal_fence_acq_rel() nounwind uwtable {
entry:
  fence singlethread acq_rel
  ret void
}
; CHECK: atomic_signal_fence_acq_rel
; CHECK: call void @__tsan_atomic_signal_fence(i32 4)
   1964 
; A cross-thread fence with acq_rel ordering must lower to
; __tsan_atomic_thread_fence with memory-order argument 4 (acq_rel).
define void @atomic_thread_fence_acq_rel() nounwind uwtable {
entry:
  fence  acq_rel
  ret void
}
; CHECK: atomic_thread_fence_acq_rel
; CHECK: call void @__tsan_atomic_thread_fence(i32 4)
   1972 
; A singlethread (signal) fence with seq_cst ordering must lower to
; __tsan_atomic_signal_fence with memory-order argument 5 (seq_cst).
define void @atomic_signal_fence_seq_cst() nounwind uwtable {
entry:
  fence singlethread seq_cst
  ret void
}
; CHECK: atomic_signal_fence_seq_cst
; CHECK: call void @__tsan_atomic_signal_fence(i32 5)
   1980 
; A cross-thread fence with seq_cst ordering must lower to
; __tsan_atomic_thread_fence with memory-order argument 5 (seq_cst).
define void @atomic_thread_fence_seq_cst() nounwind uwtable {
entry:
  fence  seq_cst
  ret void
}
; CHECK: atomic_thread_fence_seq_cst
; CHECK: call void @__tsan_atomic_thread_fence(i32 5)
   1988