; RUN: opt -print-memderefs -analyze -S <%s | FileCheck %s

; Uses the print-deref (+ analyze to print) pass to run
; isDereferenceablePointer() on many load instruction operands

target datalayout = "e-i32:32:64"

%TypeOpaque = type opaque

declare zeroext i1 @return_i1()

declare i32* @foo()
@globalstr = global [6 x i8] c"hello\00"
@globali32ptr = external global i32*

%struct.A = type { [8 x i8], [5 x i8] }
@globalstruct = external global %struct.A

@globalptr.align1 = external global i8, align 1
@globalptr.align16 = external global i8, align 16
     22 ; CHECK-LABEL: 'test'
     23 define void @test(i32 addrspace(1)* dereferenceable(8) %dparam,
     24                   i8 addrspace(1)* dereferenceable(32) align 1 %dparam.align1,
     25                   i8 addrspace(1)* dereferenceable(32) align 16 %dparam.align16)
     26     gc "statepoint-example" {
     27 ; CHECK: The following are dereferenceable:
     28 entry:
     29 ; CHECK: %globalptr{{.*}}(aligned)
     30     %globalptr = getelementptr inbounds [6 x i8], [6 x i8]* @globalstr, i32 0, i32 0
     31     %load1 = load i8, i8* %globalptr
     32 
     33 ; CHECK: %alloca{{.*}}(aligned)
     34     %alloca = alloca i1
     35     %load2 = load i1, i1* %alloca
     36 
     37 ; CHECK: %dparam{{.*}}(aligned)
     38     %load3 = load i32, i32 addrspace(1)* %dparam
     39 
     40 ; CHECK: %relocate{{.*}}(aligned)
     41     %tok = tail call token (i64, i32, i1 ()*, i32, i32, ...) @llvm.experimental.gc.statepoint.p0f_i1f(i64 0, i32 0, i1 ()* @return_i1, i32 0, i32 0, i32 0, i32 0, i32 addrspace(1)* %dparam)
     42     %relocate = call i32 addrspace(1)* @llvm.experimental.gc.relocate.p1i32(token %tok, i32 7, i32 7)
     43     %load4 = load i32, i32 addrspace(1)* %relocate
     44 
     45 ; CHECK-NOT: %nparam
     46     %dpa = call i32 addrspace(1)* @func1(i32 addrspace(1)* %dparam)
     47     %nparam = getelementptr i32, i32 addrspace(1)* %dpa, i32 5
     48     %load5 = load i32, i32 addrspace(1)* %nparam
     49 
     50     ; Load from a non-dereferenceable load
     51 ; CHECK-NOT: %nd_load
     52     %nd_load = load i32*, i32** @globali32ptr
     53     %load6 = load i32, i32* %nd_load
     54 
     55     ; Load from a dereferenceable load
     56 ; CHECK: %d4_load{{.*}}(aligned)
     57     %d4_load = load i32*, i32** @globali32ptr, !dereferenceable !0
     58     %load7 = load i32, i32* %d4_load
     59 
     60     ; Load from an offset not covered by the dereferenceable portion
     61 ; CHECK-NOT: %d2_load
     62     %d2_load = load i32*, i32** @globali32ptr, !dereferenceable !1
     63     %load8 = load i32, i32* %d2_load
     64 
     65     ; Load from a potentially null pointer with dereferenceable_or_null
     66 ; CHECK-NOT: %d_or_null_load
     67     %d_or_null_load = load i32*, i32** @globali32ptr, !dereferenceable_or_null !0
     68     %load9 = load i32, i32* %d_or_null_load
     69 
     70     ; Load from a non-null pointer with dereferenceable_or_null
     71 ; CHECK: %d_or_null_non_null_load{{.*}}(aligned)
     72     %d_or_null_non_null_load = load i32*, i32** @globali32ptr, !nonnull !2, !dereferenceable_or_null !0
     73     %load10 = load i32, i32* %d_or_null_non_null_load
     74 
     75     ; It's OK to overrun static array size as long as we stay within underlying object size
     76 ; CHECK: %within_allocation{{.*}}(aligned)
     77     %within_allocation = getelementptr inbounds %struct.A, %struct.A* @globalstruct, i64 0, i32 0, i64 10
     78     %load11 = load i8, i8* %within_allocation
     79 
     80     ; GEP is outside the underlying object size
     81 ; CHECK-NOT: %outside_allocation
     82     %outside_allocation = getelementptr inbounds %struct.A, %struct.A* @globalstruct, i64 0, i32 1, i64 10
     83     %load12 = load i8, i8* %outside_allocation
     84 
     85     ; Loads from aligned globals
     86 ; CHECK: @globalptr.align1{{.*}}(unaligned)
     87 ; CHECK: @globalptr.align16{{.*}}(aligned)
     88     %load13 = load i8, i8* @globalptr.align1, align 16
     89     %load14 = load i8, i8* @globalptr.align16, align 16
     90 
     91     ; Loads from aligned arguments
     92 ; CHECK: %dparam.align1{{.*}}(unaligned)
     93 ; CHECK: %dparam.align16{{.*}}(aligned)
     94     %load15 = load i8, i8 addrspace(1)* %dparam.align1, align 16
     95     %load16 = load i8, i8 addrspace(1)* %dparam.align16, align 16
     96 
     97     ; Loads from aligned allocas
     98 ; CHECK: %alloca.align1{{.*}}(unaligned)
     99 ; CHECK: %alloca.align16{{.*}}(aligned)
    100     %alloca.align1 = alloca i1, align 1
    101     %alloca.align16 = alloca i1, align 16
    102     %load17 = load i1, i1* %alloca.align1, align 16
    103     %load18 = load i1, i1* %alloca.align16, align 16
    104 
    105     ; Loads from GEPs
    106 ; CHECK: %gep.align1.offset1{{.*}}(unaligned)
    107 ; CHECK: %gep.align16.offset1{{.*}}(unaligned)
    108 ; CHECK: %gep.align1.offset16{{.*}}(unaligned)
    109 ; CHECK: %gep.align16.offset16{{.*}}(aligned)
    110     %gep.align1.offset1 = getelementptr inbounds i8, i8 addrspace(1)* %dparam.align1, i32 1
    111     %gep.align16.offset1 = getelementptr inbounds i8, i8 addrspace(1)* %dparam.align16, i32 1
    112     %gep.align1.offset16 = getelementptr inbounds i8, i8 addrspace(1)* %dparam.align1, i32 16
    113     %gep.align16.offset16 = getelementptr inbounds i8, i8 addrspace(1)* %dparam.align16, i32 16
    114     %load19 = load i8, i8 addrspace(1)* %gep.align1.offset1, align 16
    115     %load20 = load i8, i8 addrspace(1)* %gep.align16.offset1, align 16
    116     %load21 = load i8, i8 addrspace(1)* %gep.align1.offset16, align 16
    117     %load22 = load i8, i8 addrspace(1)* %gep.align16.offset16, align 16
    118 
    119 ; CHECK-NOT: %no_deref_return
    120 ; CHECK: %deref_return{{.*}}(unaligned)
    121 ; CHECK: %deref_and_aligned_return{{.*}}(aligned)
    122     %no_deref_return = call i32* @foo()
    123     %deref_return = call dereferenceable(32) i32* @foo()
    124     %deref_and_aligned_return = call dereferenceable(32) align 16 i32* @foo()
    125     %load23 = load i32, i32* %no_deref_return
    126     %load24 = load i32, i32* %deref_return, align 16
    127     %load25 = load i32, i32* %deref_and_aligned_return, align 16
    128 
    129     ; Load from a dereferenceable and aligned load
    130 ; CHECK: %d4_unaligned_load{{.*}}(unaligned)
    131 ; CHECK: %d4_aligned_load{{.*}}(aligned)
    132     %d4_unaligned_load = load i32*, i32** @globali32ptr, !dereferenceable !0
    133     %d4_aligned_load = load i32*, i32** @globali32ptr, !dereferenceable !0, !align !{i64 16}
    134     %load26 = load i32, i32* %d4_unaligned_load, align 16
    135     %load27 = load i32, i32* %d4_aligned_load, align 16
    136 
    137    ; Alloca with no explicit alignment is aligned to preferred alignment of
    138    ; the type (specified by datalayout string).
    139 ; CHECK: %alloca.noalign{{.*}}(aligned)
    140     %alloca.noalign = alloca i32
    141     %load28 = load i32, i32* %alloca.noalign, align 8
    142 
    143     ret void
    144 }
    145 
    146 ; Just check that we don't crash.
    147 ; CHECK-LABEL: 'opaque_type_crasher'
    148 define void @opaque_type_crasher(%TypeOpaque* dereferenceable(16) %a) {
    149 entry:
    150   %bc = bitcast %TypeOpaque* %a to i8*
    151   %ptr8 = getelementptr inbounds i8, i8* %bc, i32 8
    152   %ptr32 = bitcast i8* %ptr8 to i32*
    153   br i1 undef, label %if.then, label %if.end
    154 
    155 if.then:
    156   %res = load i32, i32* %ptr32, align 4
    157   br label %if.end
    158 
    159 if.end:
    160   ret void
    161 }
    162 
    163 declare token @llvm.experimental.gc.statepoint.p0f_i1f(i64, i32, i1 ()*, i32, i32, ...)
    164 declare i32 addrspace(1)* @llvm.experimental.gc.relocate.p1i32(token, i32, i32)
    165 
    166 declare i32 addrspace(1)* @func1(i32 addrspace(1)* returned) nounwind argmemonly
    167 
    168 !0 = !{i64 4}
    169 !1 = !{i64 2}
    170 !2 = !{}
    171