; Test MemorySanitizer instrumentation of atomic operations.
; RUN: opt < %s -msan -msan-check-access-address=0 -S | FileCheck %s

target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"

; atomicrmw xchg: store clean shadow, return clean shadow
define i32 @AtomicRmwXchg(i32* %p, i32 %x) sanitize_memory {
entry:
  ; MSan writes a clean (zero) shadow for *%p before the atomicrmw and a
  ; clean shadow to __msan_retval_tls (see the CHECK lines below).
  %0 = atomicrmw xchg i32* %p, i32 %x seq_cst
  ret i32 %0
}

; CHECK: @AtomicRmwXchg
; CHECK: store i32 0,
; CHECK: atomicrmw xchg {{.*}} seq_cst
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32


; atomicrmw max: exactly the same as above

define i32 @AtomicRmwMax(i32* %p, i32 %x) sanitize_memory {
entry:
  ; Same instrumentation pattern as xchg: clean shadow stored for *%p,
  ; clean retval shadow.
  %0 = atomicrmw max i32* %p, i32 %x seq_cst
  ret i32 %0
}

; CHECK: @AtomicRmwMax
; CHECK: store i32 0,
; CHECK: atomicrmw max {{.*}} seq_cst
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32


; cmpxchg: the same as above, but also check %a shadow

define i32 @Cmpxchg(i32* %p, i32 %a, i32 %b) sanitize_memory {
entry:
  ; The shadow of the compare operand %a is checked (icmp/br to
  ; __msan_warning in the CHECK lines below); the result shadow is clean.
  %pair = cmpxchg i32* %p, i32 %a, i32 %b seq_cst seq_cst
  ; Extract the loaded value from the { i32, i1 } result pair.
  %0 = extractvalue { i32, i1 } %pair, 0
  ret i32 %0
}

; CHECK: @Cmpxchg
; CHECK: store { i32, i1 } zeroinitializer,
; CHECK: icmp
; CHECK: br
; CHECK: @__msan_warning
; CHECK: cmpxchg {{.*}} seq_cst seq_cst
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32


; relaxed cmpxchg: bump up to "release monotonic"

define i32 @CmpxchgMonotonic(i32* %p, i32 %a, i32 %b) sanitize_memory {
entry:
  ; monotonic/monotonic ordering is strengthened by MSan to
  ; release/monotonic so the preceding shadow store is properly ordered.
  %pair = cmpxchg i32* %p, i32 %a, i32 %b monotonic monotonic
  ; Extract the loaded value from the { i32, i1 } result pair.
  %0 = extractvalue { i32, i1 } %pair, 0
  ret i32 %0
}

; CHECK: @CmpxchgMonotonic
; CHECK: store { i32, i1 } zeroinitializer,
; CHECK: icmp
; CHECK: br
; CHECK: @__msan_warning
; CHECK: cmpxchg {{.*}} release monotonic
; CHECK: store i32 0, {{.*}} @__msan_retval_tls
; CHECK: ret i32


; atomic load: preserve alignment, load shadow value after app value

define i32 @AtomicLoad(i32* %p) sanitize_memory {
entry:
  ; Shadow is loaded AFTER the application load, with the original
  ; 16-byte alignment preserved on both loads (see CHECK lines below).
  %0 = load atomic i32* %p seq_cst, align 16
  ret i32 %0
}

; CHECK: @AtomicLoad
; CHECK: load atomic i32* {{.*}} seq_cst, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32


; atomic load: preserve alignment, load shadow value after app value

define i32 @AtomicLoadAcquire(i32* %p) sanitize_memory {
entry:
  ; acquire is already strong enough: ordering is kept as-is, shadow
  ; loaded after the app value with alignment preserved.
  %0 = load atomic i32* %p acquire, align 16
  ret i32 %0
}

; CHECK: @AtomicLoadAcquire
; CHECK: load atomic i32* {{.*}} acquire, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32


; atomic load monotonic: bump up to load acquire

define i32 @AtomicLoadMonotonic(i32* %p) sanitize_memory {
entry:
  ; monotonic is strengthened to acquire so the shadow load that follows
  ; is ordered after the application load.
  %0 = load atomic i32* %p monotonic, align 16
  ret i32 %0
}

; CHECK: @AtomicLoadMonotonic
; CHECK: load atomic i32* {{.*}} acquire, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32


; atomic load unordered: bump up to load acquire

define i32 @AtomicLoadUnordered(i32* %p) sanitize_memory {
entry:
  ; unordered is likewise strengthened to acquire (see CHECK lines).
  %0 = load atomic i32* %p unordered, align 16
  ret i32 %0
}

; CHECK: @AtomicLoadUnordered
; CHECK: load atomic i32* {{.*}} acquire, align 16
; CHECK: [[SHADOW:%[01-9a-z_]+]] = load i32* {{.*}}, align 16
; CHECK: store i32 {{.*}}[[SHADOW]], {{.*}} @__msan_retval_tls
; CHECK: ret i32


; atomic store: preserve alignment, store clean shadow value before app value

define void @AtomicStore(i32* %p, i32 %x) sanitize_memory {
entry:
  ; A clean (zero) shadow is stored BEFORE the application store, same
  ; alignment; %x's shadow is never read (CHECK-NOT @__msan_param_tls).
  store atomic i32 %x, i32* %p seq_cst, align 16
  ret void
}

; CHECK: @AtomicStore
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p seq_cst, align 16
; CHECK: ret void


; atomic store: preserve alignment, store clean shadow value before app value

define void @AtomicStoreRelease(i32* %p, i32 %x) sanitize_memory {
entry:
  ; release is already strong enough: ordering kept as-is, clean shadow
  ; stored before the app value with alignment preserved.
  store atomic i32 %x, i32* %p release, align 16
  ret void
}

; CHECK: @AtomicStoreRelease
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p release, align 16
; CHECK: ret void


; atomic store monotonic: bumped up to store release

define void @AtomicStoreMonotonic(i32* %p, i32 %x) sanitize_memory {
entry:
  ; monotonic is strengthened to release so the clean shadow store that
  ; precedes it is ordered before the application store.
  store atomic i32 %x, i32* %p monotonic, align 16
  ret void
}

; CHECK: @AtomicStoreMonotonic
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p release, align 16
; CHECK: ret void


; atomic store unordered: bumped up to store release

define void @AtomicStoreUnordered(i32* %p, i32 %x) sanitize_memory {
entry:
  ; unordered is likewise strengthened to release (see CHECK lines).
  store atomic i32 %x, i32* %p unordered, align 16
  ret void
}

; CHECK: @AtomicStoreUnordered
; CHECK-NOT: @__msan_param_tls
; CHECK: store i32 0, i32* {{.*}}, align 16
; CHECK: store atomic i32 %x, i32* %p release, align 16
; CHECK: ret void
