; This file is copied/adapted from llvm/test/NaCl/PNaClABI/abi-atomics.ll.
; TODO(stichnot): Find a way to share the file to avoid divergence.

; REQUIRES: allow_dump

; RUN: %p2i -i %s --args --verbose none --exit-success -threads=0 2>&1 \
; RUN:   | FileCheck %s
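; Calls below that use a disallowed memory ordering are expected to produce an
; "Unexpected memory ordering" diagnostic; --exit-success keeps the translator's
; exit status at zero so those messages can be matched by FileCheck.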

declare i8 @llvm.nacl.atomic.load.i8(i8*, i32)
declare i16 @llvm.nacl.atomic.load.i16(i16*, i32)
declare i32 @llvm.nacl.atomic.load.i32(i32*, i32)
declare i64 @llvm.nacl.atomic.load.i64(i64*, i32)
declare void @llvm.nacl.atomic.store.i8(i8, i8*, i32)
declare void @llvm.nacl.atomic.store.i16(i16, i16*, i32)
declare void @llvm.nacl.atomic.store.i32(i32, i32*, i32)
declare void @llvm.nacl.atomic.store.i64(i64, i64*, i32)
declare i8 @llvm.nacl.atomic.rmw.i8(i32, i8*, i8, i32)
declare i16 @llvm.nacl.atomic.rmw.i16(i32, i16*, i16, i32)
declare i32 @llvm.nacl.atomic.rmw.i32(i32, i32*, i32, i32)
declare i64 @llvm.nacl.atomic.rmw.i64(i32, i64*, i64, i32)
declare i8 @llvm.nacl.atomic.cmpxchg.i8(i8*, i8, i8, i32, i32)
declare i16 @llvm.nacl.atomic.cmpxchg.i16(i16*, i16, i16, i32, i32)
declare i32 @llvm.nacl.atomic.cmpxchg.i32(i32*, i32, i32, i32, i32)
declare i64 @llvm.nacl.atomic.cmpxchg.i64(i64*, i64, i64, i32, i32)
declare void @llvm.nacl.atomic.fence(i32)
declare void @llvm.nacl.atomic.fence.all()
declare i1 @llvm.nacl.atomic.is.lock.free(i32, i8*)
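; Memory ordering values passed as the trailing i32 argument(s) of these
; intrinsics, as exercised by the tests below:
;   1=relaxed, 2=consume, 3=acquire, 4=release, 5=acq_rel, 6=seq_cst.
; The values 0 and 7 are out of range and are always rejected.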


; Load
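; Atomic loads accept only acquire (3) and seq_cst (6); every other ordering
; value is rejected.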

define internal i32 @test_load_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 7)
  ret i32 %1
}
; CHECK: test_load_invalid_7: Unexpected memory ordering for AtomicLoad

define internal i32 @test_load_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 0)
  ret i32 %1
}
; CHECK: test_load_invalid_0: Unexpected memory ordering for AtomicLoad

define internal i32 @test_load_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 6)
  ret i32 %1
}
; CHECK-LABEL: test_load_seqcst

define internal i32 @test_load_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 5)
  ret i32 %1
}
; CHECK: test_load_acqrel: Unexpected memory ordering for AtomicLoad

define internal i32 @test_load_release() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 4)
  ret i32 %1
}
; CHECK: test_load_release: Unexpected memory ordering for AtomicLoad

define internal i32 @test_load_acquire() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 3)
  ret i32 %1
}
; CHECK-LABEL: test_load_acquire

define internal i32 @test_load_consume() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 2)
  ret i32 %1
}
; CHECK: test_load_consume: Unexpected memory ordering for AtomicLoad

define internal i32 @test_load_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.load.i32(i32* %ptr, i32 1)
  ret i32 %1
}
; CHECK: test_load_relaxed: Unexpected memory ordering for AtomicLoad


; Store
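; Atomic stores accept only release (4) and seq_cst (6); every other ordering
; value is rejected.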

define internal void @test_store_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 7)
  ret void
}
; CHECK: test_store_invalid_7: Unexpected memory ordering for AtomicStore

define internal void @test_store_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 0)
  ret void
}
; CHECK: test_store_invalid_0: Unexpected memory ordering for AtomicStore

define internal void @test_store_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 6)
  ret void
}
; CHECK-LABEL: test_store_seqcst

define internal void @test_store_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 5)
  ret void
}
; CHECK: test_store_acqrel: Unexpected memory ordering for AtomicStore

define internal void @test_store_release() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 4)
  ret void
}
; CHECK-LABEL: test_store_release

define internal void @test_store_acquire() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 3)
  ret void
}
; CHECK: test_store_acquire: Unexpected memory ordering for AtomicStore

define internal void @test_store_consume() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 2)
  ret void
}
; CHECK: test_store_consume: Unexpected memory ordering for AtomicStore

define internal void @test_store_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  call void @llvm.nacl.atomic.store.i32(i32 undef, i32* %ptr, i32 1)
  ret void
}
; CHECK: test_store_relaxed: Unexpected memory ordering for AtomicStore


; rmw
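; Atomic read-modify-write accepts acquire (3), release (4), acq_rel (5), and
; seq_cst (6); consume, relaxed, and out-of-range values are rejected.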

define internal i32 @test_rmw_invalid_7() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 7)
  ret i32 %1
}
; CHECK: test_rmw_invalid_7: Unexpected memory ordering for AtomicRMW

define internal i32 @test_rmw_invalid_0() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 0)
  ret i32 %1
}
; CHECK: test_rmw_invalid_0: Unexpected memory ordering for AtomicRMW

define internal i32 @test_rmw_seqcst() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 6)
  ret i32 %1
}
; CHECK-LABEL: test_rmw_seqcst

define internal i32 @test_rmw_acqrel() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 5)
  ret i32 %1
}
; CHECK-LABEL: test_rmw_acqrel

define internal i32 @test_rmw_release() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 4)
  ret i32 %1
}
; CHECK-LABEL: test_rmw_release

define internal i32 @test_rmw_acquire() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 3)
  ret i32 %1
}
; CHECK-LABEL: test_rmw_acquire

define internal i32 @test_rmw_consume() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 2)
  ret i32 %1
}
; CHECK: test_rmw_consume: Unexpected memory ordering for AtomicRMW

define internal i32 @test_rmw_relaxed() {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.rmw.i32(i32 1, i32* %ptr, i32 0, i32 1)
  ret i32 %1
}
; CHECK: test_rmw_relaxed: Unexpected memory ordering for AtomicRMW


; cmpxchg
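; Compare-and-exchange takes a success ordering followed by a failure ordering.
; The only accepted pairs are seq_cst/seq_cst, seq_cst/acquire, acq_rel/acquire,
; and acquire/acquire; the groups below cover each success ordering in turn.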

define internal i32 @test_cmpxchg_invalid_7(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 7, i32 7)
  ret i32 %1
}
; CHECK: test_cmpxchg_invalid_7: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_invalid_0(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 0, i32 0)
  ret i32 %1
}
; CHECK: test_cmpxchg_invalid_0: Unexpected memory ordering for AtomicCmpxchg

; seq_cst
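; With a seq_cst success ordering, only seq_cst and acquire failure orderings
; are accepted.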

define internal i32 @test_cmpxchg_seqcst_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 6)
  ret i32 %1
}
; CHECK-LABEL: test_cmpxchg_seqcst_seqcst

define internal i32 @test_cmpxchg_seqcst_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_acqrel: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_seqcst_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_release: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_seqcst_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 3)
  ret i32 %1
}
; CHECK-LABEL: test_cmpxchg_seqcst_acquire

define internal i32 @test_cmpxchg_seqcst_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_consume: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_seqcst_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 6, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_seqcst_relaxed: Unexpected memory ordering for AtomicCmpxchg

; acq_rel
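; With an acq_rel success ordering, only an acquire failure ordering is accepted.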

define internal i32 @test_cmpxchg_acqrel_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_seqcst: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acqrel_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_acqrel: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acqrel_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_release: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acqrel_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 3)
  ret i32 %1
}
; CHECK-LABEL: test_cmpxchg_acqrel_acquire

define internal i32 @test_cmpxchg_acqrel_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_consume: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acqrel_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 5, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_acqrel_relaxed: Unexpected memory ordering for AtomicCmpxchg

; release
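; A release success ordering is rejected regardless of the failure ordering.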

define internal i32 @test_cmpxchg_release_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_seqcst: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_release_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_acqrel: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_release_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_release: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_release_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_acquire: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_release_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_consume: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_release_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 4, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_release_relaxed: Unexpected memory ordering for AtomicCmpxchg

; acquire
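; With an acquire success ordering, only an acquire failure ordering is accepted.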

define internal i32 @test_cmpxchg_acquire_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_seqcst: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acquire_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_acqrel: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acquire_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_release: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acquire_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 3)
  ret i32 %1
}
; CHECK-LABEL: test_cmpxchg_acquire_acquire

define internal i32 @test_cmpxchg_acquire_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_consume: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_acquire_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 3, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_acquire_relaxed: Unexpected memory ordering for AtomicCmpxchg

; consume
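; A consume success ordering is rejected regardless of the failure ordering.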

define internal i32 @test_cmpxchg_consume_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_seqcst: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_consume_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_acqrel: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_consume_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_release: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_consume_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_acquire: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_consume_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_consume: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_consume_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 2, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_consume_relaxed: Unexpected memory ordering for AtomicCmpxchg

; relaxed
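; A relaxed success ordering is rejected regardless of the failure ordering.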

define internal i32 @test_cmpxchg_relaxed_seqcst(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 6)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_seqcst: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_relaxed_acqrel(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 5)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_acqrel: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_relaxed_release(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 4)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_release: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_relaxed_acquire(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 3)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_acquire: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_relaxed_consume(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 2)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_consume: Unexpected memory ordering for AtomicCmpxchg

define internal i32 @test_cmpxchg_relaxed_relaxed(i32 %oldval, i32 %newval) {
  %ptr = inttoptr i32 undef to i32*
  %1 = call i32 @llvm.nacl.atomic.cmpxchg.i32(i32* %ptr, i32 %oldval, i32 %newval, i32 1, i32 1)
  ret i32 %1
}
; CHECK: test_cmpxchg_relaxed_relaxed: Unexpected memory ordering for AtomicCmpxchg


; fence
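; Fences accept acquire (3), release (4), acq_rel (5), and seq_cst (6);
; consume, relaxed, and out-of-range values are rejected.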

define internal void @test_fence_invalid_7() {
  call void @llvm.nacl.atomic.fence(i32 7)
  ret void
}
; CHECK: test_fence_invalid_7: Unexpected memory ordering for AtomicFence

define internal void @test_fence_invalid_0() {
  call void @llvm.nacl.atomic.fence(i32 0)
  ret void
}
; CHECK: test_fence_invalid_0: Unexpected memory ordering for AtomicFence

define internal void @test_fence_seqcst() {
  call void @llvm.nacl.atomic.fence(i32 6)
  ret void
}
; CHECK-LABEL: test_fence_seqcst

define internal void @test_fence_acqrel() {
  call void @llvm.nacl.atomic.fence(i32 5)
  ret void
}
; CHECK-LABEL: test_fence_acqrel

define internal void @test_fence_release() {
  call void @llvm.nacl.atomic.fence(i32 4)
  ret void
}
; CHECK-LABEL: test_fence_release

define internal void @test_fence_acquire() {
  call void @llvm.nacl.atomic.fence(i32 3)
  ret void
}
; CHECK-LABEL: test_fence_acquire

define internal void @test_fence_consume() {
  call void @llvm.nacl.atomic.fence(i32 2)
  ret void
}
; CHECK: test_fence_consume: Unexpected memory ordering for AtomicFence

define internal void @test_fence_relaxed() {
  call void @llvm.nacl.atomic.fence(i32 1)
  ret void
}
; CHECK: test_fence_relaxed: Unexpected memory ordering for AtomicFence
    537