; Test zero extensions from an i32 to an i64.
;
; RUN: llc < %s -mtriple=s390x-linux-gnu | FileCheck %s

; Test register extension, starting with an i32.
define i64 @f1(i32 %a) {
; CHECK-LABEL: f1:
; CHECK: llgfr %r2, %r2
; CHECK: br %r14
  %ext = zext i32 %a to i64
  ret i64 %ext
}

; ...and again with an i64.
define i64 @f2(i64 %a) {
; CHECK-LABEL: f2:
; CHECK: llgfr %r2, %r2
; CHECK: br %r14
  %word = trunc i64 %a to i32
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Check ANDs that are equivalent to zero extension.
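; (4294967295 is 0xffffffff, so the AND keeps only the low 32 bits.)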
define i64 @f3(i64 %a) {
; CHECK-LABEL: f3:
; CHECK: llgfr %r2, %r2
; CHECK: br %r14
  %ext = and i64 %a, 4294967295
  ret i64 %ext
}

; Check LLGF with no displacement.
define i64 @f4(i32 *%src) {
; CHECK-LABEL: f4:
; CHECK: llgf %r2, 0(%r2)
; CHECK: br %r14
  %word = load i32 *%src
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Check the high end of the LLGF range.
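; LLGF takes a signed 20-bit displacement, so 131071 * 4 = 524284 is the
; largest word-aligned offset that still fits.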
define i64 @f5(i32 *%src) {
; CHECK-LABEL: f5:
; CHECK: llgf %r2, 524284(%r2)
; CHECK: br %r14
  %ptr = getelementptr i32 *%src, i64 131071
  %word = load i32 *%ptr
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Check the next word up, which needs separate address logic.
; Other sequences besides this one would be OK.
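; (131072 * 4 = 524288 does not fit in the displacement field, so the base
; must be adjusted first.)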
define i64 @f6(i32 *%src) {
; CHECK-LABEL: f6:
; CHECK: agfi %r2, 524288
; CHECK: llgf %r2, 0(%r2)
; CHECK: br %r14
  %ptr = getelementptr i32 *%src, i64 131072
  %word = load i32 *%ptr
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Check the high end of the negative LLGF range.
define i64 @f7(i32 *%src) {
; CHECK-LABEL: f7:
; CHECK: llgf %r2, -4(%r2)
; CHECK: br %r14
  %ptr = getelementptr i32 *%src, i64 -1
  %word = load i32 *%ptr
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Check the low end of the LLGF range.
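; -131072 * 4 = -524288 is the most negative displacement that can be encoded.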
define i64 @f8(i32 *%src) {
; CHECK-LABEL: f8:
; CHECK: llgf %r2, -524288(%r2)
; CHECK: br %r14
  %ptr = getelementptr i32 *%src, i64 -131072
  %word = load i32 *%ptr
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Check the next word down, which needs separate address logic.
; Other sequences besides this one would be OK.
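; (-131073 * 4 = -524292 lies just below the encodable range.)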
define i64 @f9(i32 *%src) {
; CHECK-LABEL: f9:
; CHECK: agfi %r2, -524292
; CHECK: llgf %r2, 0(%r2)
; CHECK: br %r14
  %ptr = getelementptr i32 *%src, i64 -131073
  %word = load i32 *%ptr
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Check that LLGF allows an index.
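; (As an RXY-form instruction, LLGF can combine a base register, an index
; register and a displacement in its address.)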
define i64 @f10(i64 %src, i64 %index) {
; CHECK-LABEL: f10:
; CHECK: llgf %r2, 524287(%r3,%r2)
; CHECK: br %r14
  %add1 = add i64 %src, %index
  %add2 = add i64 %add1, 524287
  %ptr = inttoptr i64 %add2 to i32 *
  %word = load i32 *%ptr
  %ext = zext i32 %word to i64
  ret i64 %ext
}

; Test a case where we spill the source of at least one LLGFR.  We want
; to use LLGF if possible.
define void @f11(i64 *%ptr1, i32 *%ptr2) {
; CHECK-LABEL: f11:
; CHECK: llgf {{%r[0-9]+}}, 16{{[04]}}(%r15)
; CHECK: br %r14
  %val0 = load volatile i32 *%ptr2
  %val1 = load volatile i32 *%ptr2
  %val2 = load volatile i32 *%ptr2
  %val3 = load volatile i32 *%ptr2
  %val4 = load volatile i32 *%ptr2
  %val5 = load volatile i32 *%ptr2
  %val6 = load volatile i32 *%ptr2
  %val7 = load volatile i32 *%ptr2
  %val8 = load volatile i32 *%ptr2
  %val9 = load volatile i32 *%ptr2
  %val10 = load volatile i32 *%ptr2
  %val11 = load volatile i32 *%ptr2
  %val12 = load volatile i32 *%ptr2
  %val13 = load volatile i32 *%ptr2
  %val14 = load volatile i32 *%ptr2
  %val15 = load volatile i32 *%ptr2

  %ext0 = zext i32 %val0 to i64
  %ext1 = zext i32 %val1 to i64
  %ext2 = zext i32 %val2 to i64
  %ext3 = zext i32 %val3 to i64
  %ext4 = zext i32 %val4 to i64
  %ext5 = zext i32 %val5 to i64
  %ext6 = zext i32 %val6 to i64
  %ext7 = zext i32 %val7 to i64
  %ext8 = zext i32 %val8 to i64
  %ext9 = zext i32 %val9 to i64
  %ext10 = zext i32 %val10 to i64
  %ext11 = zext i32 %val11 to i64
  %ext12 = zext i32 %val12 to i64
  %ext13 = zext i32 %val13 to i64
  %ext14 = zext i32 %val14 to i64
  %ext15 = zext i32 %val15 to i64

  store volatile i32 %val0, i32 *%ptr2
  store volatile i32 %val1, i32 *%ptr2
  store volatile i32 %val2, i32 *%ptr2
  store volatile i32 %val3, i32 *%ptr2
  store volatile i32 %val4, i32 *%ptr2
  store volatile i32 %val5, i32 *%ptr2
  store volatile i32 %val6, i32 *%ptr2
  store volatile i32 %val7, i32 *%ptr2
  store volatile i32 %val8, i32 *%ptr2
  store volatile i32 %val9, i32 *%ptr2
  store volatile i32 %val10, i32 *%ptr2
  store volatile i32 %val11, i32 *%ptr2
  store volatile i32 %val12, i32 *%ptr2
  store volatile i32 %val13, i32 *%ptr2
  store volatile i32 %val14, i32 *%ptr2
  store volatile i32 %val15, i32 *%ptr2

  store volatile i64 %ext0, i64 *%ptr1
  store volatile i64 %ext1, i64 *%ptr1
  store volatile i64 %ext2, i64 *%ptr1
  store volatile i64 %ext3, i64 *%ptr1
  store volatile i64 %ext4, i64 *%ptr1
  store volatile i64 %ext5, i64 *%ptr1
  store volatile i64 %ext6, i64 *%ptr1
  store volatile i64 %ext7, i64 *%ptr1
  store volatile i64 %ext8, i64 *%ptr1
  store volatile i64 %ext9, i64 *%ptr1
  store volatile i64 %ext10, i64 *%ptr1
  store volatile i64 %ext11, i64 *%ptr1
  store volatile i64 %ext12, i64 *%ptr1
  store volatile i64 %ext13, i64 *%ptr1
  store volatile i64 %ext14, i64 *%ptr1
  store volatile i64 %ext15, i64 *%ptr1

  ret void
}