; llvm/test/CodeGen/X86 — shift-amount mask folding tests (code-browser navigation header replaced with this comment)
      1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
      2 ; RUN: llc < %s -mtriple=i386-unknown-unknown   | FileCheck %s --check-prefix=X32
      3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64
      4 
; The IR masks the shift amount with 31, but x86 32-bit shifts already use
; only the low 5 bits of %cl, so the backend drops the redundant AND: the
; autogenerated CHECK lines below contain no separate AND instruction.
      5 define i32 @t1(i32 %t, i32 %val) nounwind {
      6 ; X32-LABEL: t1:
      7 ; X32:       # %bb.0:
      8 ; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
      9 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
     10 ; X32-NEXT:    shll %cl, %eax
     11 ; X32-NEXT:    retl
     12 ;
     13 ; X64-LABEL: t1:
     14 ; X64:       # %bb.0:
     15 ; X64-NEXT:    movl %edi, %ecx
     16 ; X64-NEXT:    shll %cl, %esi
     17 ; X64-NEXT:    movl %esi, %eax
     18 ; X64-NEXT:    retq
     19        %shamt = and i32 %t, 31
     20        %res = shl i32 %val, %shamt
     21        ret i32 %res
     22 }
     23 
; Same as @t1 but with a 6-bit mask (63). Since the mask keeps all of the
; low 5 bits that a 32-bit x86 shift actually consumes, the AND is still
; removable and the expected code is identical to @t1's.
     24 define i32 @t2(i32 %t, i32 %val) nounwind {
     25 ; X32-LABEL: t2:
     26 ; X32:       # %bb.0:
     27 ; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
     28 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
     29 ; X32-NEXT:    shll %cl, %eax
     30 ; X32-NEXT:    retl
     31 ;
     32 ; X64-LABEL: t2:
     33 ; X64:       # %bb.0:
     34 ; X64-NEXT:    movl %edi, %ecx
     35 ; X64-NEXT:    shll %cl, %esi
     36 ; X64-NEXT:    movl %esi, %eax
     37 ; X64-NEXT:    retq
     38        %shamt = and i32 %t, 63
     39        %res = shl i32 %val, %shamt
     40        ret i32 %res
     41 }
     42 
; Mutable i16 global used as the read-modify-write target of @t3.
     43 @X = internal global i16 0
     44 
; i16 arithmetic-shift-right of a global. Two folds are checked here:
; (1) the `and 31` on the shift amount is dropped (x86 masks the count in
;     %cl itself — no AND appears in the CHECK lines), and
; (2) the load/ashr/store sequence collapses into a single memory-destination
;     `sarw` (direct to X on X32, RIP-relative on X64).
     45 define void @t3(i16 %t) nounwind {
     46 ; X32-LABEL: t3:
     47 ; X32:       # %bb.0:
     48 ; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
     49 ; X32-NEXT:    sarw %cl, X
     50 ; X32-NEXT:    retl
     51 ;
     52 ; X64-LABEL: t3:
     53 ; X64:       # %bb.0:
     54 ; X64-NEXT:    movl %edi, %ecx
     55 ; X64-NEXT:    sarw %cl, {{.*}}(%rip)
     56 ; X64-NEXT:    retq
     57        %shamt = and i16 %t, 31
     58        %tmp = load i16, i16* @X
     59        %tmp1 = ashr i16 %tmp, %shamt
     60        store i16 %tmp1, i16* @X
     61        ret void
     62 }
     63 
; i64 logical shift right with a 6-bit mask (63) on the count. On X64 a
; 64-bit shift consumes exactly the low 6 bits of %cl, so the AND is folded
; away and a single `shrq` suffices. On X32 the i64 shift is expanded into
; the usual shrl/shrdl pair plus a `testb $32, %cl` branch that selects
; between the small-count and large-count (>=32) results — still no AND.
     64 define i64 @t4(i64 %t, i64 %val) nounwind {
     65 ; X32-LABEL: t4:
     66 ; X32:       # %bb.0:
     67 ; X32-NEXT:    pushl %esi
     68 ; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
     69 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
     70 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
     71 ; X32-NEXT:    movl %esi, %edx
     72 ; X32-NEXT:    shrl %cl, %edx
     73 ; X32-NEXT:    shrdl %cl, %esi, %eax
     74 ; X32-NEXT:    testb $32, %cl
     75 ; X32-NEXT:    je .LBB3_2
     76 ; X32-NEXT:  # %bb.1:
     77 ; X32-NEXT:    movl %edx, %eax
     78 ; X32-NEXT:    xorl %edx, %edx
     79 ; X32-NEXT:  .LBB3_2:
     80 ; X32-NEXT:    popl %esi
     81 ; X32-NEXT:    retl
     82 ;
     83 ; X64-LABEL: t4:
     84 ; X64:       # %bb.0:
     85 ; X64-NEXT:    movl %edi, %ecx
     86 ; X64-NEXT:    shrq %cl, %rsi
     87 ; X64-NEXT:    movq %rsi, %rax
     88 ; X64-NEXT:    retq
     89        %shamt = and i64 %t, 63
     90        %res = lshr i64 %val, %shamt
     91        ret i64 %res
     92 }
     93 
; Variant of @t4 with mask 191 (0xBF). Its low 6 bits are all set, so the
; AND preserves every count bit a 64-bit shift uses and is folded away;
; expected output is identical to @t4's for both targets.
     94 define i64 @t5(i64 %t, i64 %val) nounwind {
     95 ; X32-LABEL: t5:
     96 ; X32:       # %bb.0:
     97 ; X32-NEXT:    pushl %esi
     98 ; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
     99 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
    100 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
    101 ; X32-NEXT:    movl %esi, %edx
    102 ; X32-NEXT:    shrl %cl, %edx
    103 ; X32-NEXT:    shrdl %cl, %esi, %eax
    104 ; X32-NEXT:    testb $32, %cl
    105 ; X32-NEXT:    je .LBB4_2
    106 ; X32-NEXT:  # %bb.1:
    107 ; X32-NEXT:    movl %edx, %eax
    108 ; X32-NEXT:    xorl %edx, %edx
    109 ; X32-NEXT:  .LBB4_2:
    110 ; X32-NEXT:    popl %esi
    111 ; X32-NEXT:    retl
    112 ;
    113 ; X64-LABEL: t5:
    114 ; X64:       # %bb.0:
    115 ; X64-NEXT:    movl %edi, %ecx
    116 ; X64-NEXT:    shrq %cl, %rsi
    117 ; X64-NEXT:    movq %rsi, %rax
    118 ; X64-NEXT:    retq
    119        %shamt = and i64 %t, 191
    120        %res = lshr i64 %val, %shamt
    121        ret i64 %res
    122 }
    123 
; Memory read-modify-write version of @t5: load, shift by a 191-masked
; count, store back through %ptr. The mask is folded as in @t5, and on X64
; the whole load/lshr/store collapses into a single memory-operand
; `shrq %cl, (%rsi)`. X32 expands the i64 RMW with shrl/shrdl plus the
; `testb $32` count-size branch before writing both halves back.
    124 define void @t5ptr(i64 %t, i64* %ptr) nounwind {
    125 ; X32-LABEL: t5ptr:
    126 ; X32:       # %bb.0:
    127 ; X32-NEXT:    pushl %edi
    128 ; X32-NEXT:    pushl %esi
    129 ; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
    130 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
    131 ; X32-NEXT:    movl (%eax), %edx
    132 ; X32-NEXT:    movl 4(%eax), %edi
    133 ; X32-NEXT:    movl %edi, %esi
    134 ; X32-NEXT:    shrl %cl, %esi
    135 ; X32-NEXT:    shrdl %cl, %edi, %edx
    136 ; X32-NEXT:    testb $32, %cl
    137 ; X32-NEXT:    je .LBB5_2
    138 ; X32-NEXT:  # %bb.1:
    139 ; X32-NEXT:    movl %esi, %edx
    140 ; X32-NEXT:    xorl %esi, %esi
    141 ; X32-NEXT:  .LBB5_2:
    142 ; X32-NEXT:    movl %esi, 4(%eax)
    143 ; X32-NEXT:    movl %edx, (%eax)
    144 ; X32-NEXT:    popl %esi
    145 ; X32-NEXT:    popl %edi
    146 ; X32-NEXT:    retl
    147 ;
    148 ; X64-LABEL: t5ptr:
    149 ; X64:       # %bb.0:
    150 ; X64-NEXT:    movl %edi, %ecx
    151 ; X64-NEXT:    shrq %cl, (%rsi)
    152 ; X64-NEXT:    retq
    153        %shamt = and i64 %t, 191
    154        %tmp = load i64, i64* %ptr
    155        %tmp1 = lshr i64 %tmp, %shamt
    156        store i64 %tmp1, i64* %ptr
    157        ret void
    158 }
    159 
    160 
    161 ; rdar://11866926
; Demanded-bits test: the add constant 2305843009213693951 is 2^61 - 1.
; Because the sum is ANDed with %shr = (%key >> 3), which has its top three
; bits clear (< 2^61), only the low 61 bits of the sum matter, so the
; backend is free to treat the add as "add -1" — X64 emits `decq` and X32
; emits an addl/adcl $-1 pair instead of materializing the huge immediate.
; X32 also uses shrdl/shrl to perform the 64-bit shift-by-3.
    162 define i64 @t6(i64 %key, i64* nocapture %val) nounwind {
    163 ; X32-LABEL: t6:
    164 ; X32:       # %bb.0:
    165 ; X32-NEXT:    pushl %edi
    166 ; X32-NEXT:    pushl %esi
    167 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
    168 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
    169 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
    170 ; X32-NEXT:    shrdl $3, %eax, %esi
    171 ; X32-NEXT:    movl %eax, %edi
    172 ; X32-NEXT:    shrl $3, %edi
    173 ; X32-NEXT:    movl (%ecx), %eax
    174 ; X32-NEXT:    movl 4(%ecx), %edx
    175 ; X32-NEXT:    addl $-1, %eax
    176 ; X32-NEXT:    adcl $-1, %edx
    177 ; X32-NEXT:    andl %esi, %eax
    178 ; X32-NEXT:    andl %edi, %edx
    179 ; X32-NEXT:    popl %esi
    180 ; X32-NEXT:    popl %edi
    181 ; X32-NEXT:    retl
    182 ;
    183 ; X64-LABEL: t6:
    184 ; X64:       # %bb.0:
    185 ; X64-NEXT:    shrq $3, %rdi
    186 ; X64-NEXT:    movq (%rsi), %rax
    187 ; X64-NEXT:    decq %rax
    188 ; X64-NEXT:    andq %rdi, %rax
    189 ; X64-NEXT:    retq
    190   %shr = lshr i64 %key, 3
    191   %1 = load i64, i64* %val, align 8
    192   %sub = add i64 %1, 2305843009213693951
    193   %and = and i64 %sub, %shr
    194   ret i64 %and
    195 }
    196 
; (%x & 0x400000000) >> 7: the single-bit 64-bit mask is pushed through the
; shift so the AND happens after it with the smaller constant 0x8000000,
; which fits in 32 bits — X64 uses a 32-bit `andl` instead of a 64-bit AND
; with a wide immediate. X32 operates only on the high dword (bit 34 lives
; there) and zeroes %edx for the upper half of the result.
    197 define i64 @big_mask_constant(i64 %x) nounwind {
    198 ; X32-LABEL: big_mask_constant:
    199 ; X32:       # %bb.0:
    200 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
    201 ; X32-NEXT:    andl $4, %eax
    202 ; X32-NEXT:    shll $25, %eax
    203 ; X32-NEXT:    xorl %edx, %edx
    204 ; X32-NEXT:    retl
    205 ;
    206 ; X64-LABEL: big_mask_constant:
    207 ; X64:       # %bb.0:
    208 ; X64-NEXT:    shrq $7, %rdi
    209 ; X64-NEXT:    andl $134217728, %edi # imm = 0x8000000
    210 ; X64-NEXT:    movq %rdi, %rax
    211 ; X64-NEXT:    retq
    212   %and = and i64 %x, 17179869184 ; 0x400000000
    213   %sh = lshr i64 %and, 7
    214   ret i64 %sh
    215 }
    216 
    217