; X86 codegen regression test: lowering of signed-truncation checks (PR38149).
      1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
      2 ; RUN: llc -mtriple=i686-unknown-linux-gnu   < %s | FileCheck %s --check-prefixes=CHECK,X86,NOBMI2,X86-NOBMI2,FALLBACK0,X86-FALLBACK0
      3 ; RUN: llc -mtriple=x86_64-unknown-linux-gnu < %s | FileCheck %s --check-prefixes=CHECK,X64,NOBMI2,X64-NOBMI2,FALLBACK0,X64-FALLBACK0
      4 
      5 ; https://bugs.llvm.org/show_bug.cgi?id=38149
      6 
      7 ; We are truncating from wider width, and then sign-extending
      8 ; back to the original width. Then we equality-comparing orig and src.
      9 ; If they don't match, then we had signed truncation during truncation.
     10 
     11 ; This can be expressed in a several ways in IR:
     12 ;   trunc + sext + icmp eq <- not canonical
     13 ;   shl   + ashr + icmp eq
     14 ;   add          + icmp uge
     15 ;   add          + icmp ult/ule
     16 ; However only the simplest form (with two shifts) gets lowered best.
     17 
     18 ; ---------------------------------------------------------------------------- ;
     19 ; shl + ashr + icmp eq
     20 ; ---------------------------------------------------------------------------- ;
     21 
     22 define i1 @shifts_eqcmp_i16_i8(i16 %x) nounwind {
        ; Positive: sext(trunc i16->i8) == x; expect movsbl+cmpw+sete, no shifts emitted.
     23 ; X86-LABEL: shifts_eqcmp_i16_i8:
     24 ; X86:       # %bb.0:
     25 ; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
     26 ; X86-NEXT:    movsbl %al, %ecx
     27 ; X86-NEXT:    cmpw %ax, %cx
     28 ; X86-NEXT:    sete %al
     29 ; X86-NEXT:    retl
     30 ;
     31 ; X64-LABEL: shifts_eqcmp_i16_i8:
     32 ; X64:       # %bb.0:
     33 ; X64-NEXT:    movsbl %dil, %eax
     34 ; X64-NEXT:    cmpw %di, %ax
     35 ; X64-NEXT:    sete %al
     36 ; X64-NEXT:    retq
     37   %tmp0 = shl i16 %x, 8 ; 16-8
     38   %tmp1 = ashr exact i16 %tmp0, 8 ; 16-8
     39   %tmp2 = icmp eq i16 %tmp1, %x
     40   ret i1 %tmp2
     41 }
     42 
     43 define i1 @shifts_eqcmp_i32_i16(i32 %x) nounwind {
        ; Positive: sext(trunc i32->i16) == x; expect movswl+cmpl+sete, no shifts emitted.
     44 ; X86-LABEL: shifts_eqcmp_i32_i16:
     45 ; X86:       # %bb.0:
     46 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
     47 ; X86-NEXT:    movswl %ax, %ecx
     48 ; X86-NEXT:    cmpl %eax, %ecx
     49 ; X86-NEXT:    sete %al
     50 ; X86-NEXT:    retl
     51 ;
     52 ; X64-LABEL: shifts_eqcmp_i32_i16:
     53 ; X64:       # %bb.0:
     54 ; X64-NEXT:    movswl %di, %eax
     55 ; X64-NEXT:    cmpl %edi, %eax
     56 ; X64-NEXT:    sete %al
     57 ; X64-NEXT:    retq
     58   %tmp0 = shl i32 %x, 16 ; 32-16
     59   %tmp1 = ashr exact i32 %tmp0, 16 ; 32-16
     60   %tmp2 = icmp eq i32 %tmp1, %x
     61   ret i1 %tmp2
     62 }
     63 
     64 define i1 @shifts_eqcmp_i32_i8(i32 %x) nounwind {
        ; Positive: sext(trunc i32->i8) == x; expect movsbl+cmpl+sete, no shifts emitted.
     65 ; X86-LABEL: shifts_eqcmp_i32_i8:
     66 ; X86:       # %bb.0:
     67 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
     68 ; X86-NEXT:    movsbl %al, %ecx
     69 ; X86-NEXT:    cmpl %eax, %ecx
     70 ; X86-NEXT:    sete %al
     71 ; X86-NEXT:    retl
     72 ;
     73 ; X64-LABEL: shifts_eqcmp_i32_i8:
     74 ; X64:       # %bb.0:
     75 ; X64-NEXT:    movsbl %dil, %eax
     76 ; X64-NEXT:    cmpl %edi, %eax
     77 ; X64-NEXT:    sete %al
     78 ; X64-NEXT:    retq
     79   %tmp0 = shl i32 %x, 24 ; 32-8
     80   %tmp1 = ashr exact i32 %tmp0, 24 ; 32-8
     81   %tmp2 = icmp eq i32 %tmp1, %x
     82   ret i1 %tmp2
     83 }
     84 
     85 define i1 @shifts_eqcmp_i64_i32(i64 %x) nounwind {
        ; Positive: sext(trunc i64->i32) == x; X64 uses movslq+cmpq, X86 xors the high word against the sign of the low word.
     86 ; X86-LABEL: shifts_eqcmp_i64_i32:
     87 ; X86:       # %bb.0:
     88 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
     89 ; X86-NEXT:    sarl $31, %eax
     90 ; X86-NEXT:    xorl {{[0-9]+}}(%esp), %eax
     91 ; X86-NEXT:    sete %al
     92 ; X86-NEXT:    retl
     93 ;
     94 ; X64-LABEL: shifts_eqcmp_i64_i32:
     95 ; X64:       # %bb.0:
     96 ; X64-NEXT:    movslq %edi, %rax
     97 ; X64-NEXT:    cmpq %rdi, %rax
     98 ; X64-NEXT:    sete %al
     99 ; X64-NEXT:    retq
    100   %tmp0 = shl i64 %x, 32 ; 64-32
    101   %tmp1 = ashr exact i64 %tmp0, 32 ; 64-32
    102   %tmp2 = icmp eq i64 %tmp1, %x
    103   ret i1 %tmp2
    104 }
    105 
    106 define i1 @shifts_eqcmp_i64_i16(i64 %x) nounwind {
        ; Positive: sext(trunc i64->i16) == x; X64 uses movswq+cmpq, X86 expands over the 32-bit register pair.
    107 ; X86-LABEL: shifts_eqcmp_i64_i16:
    108 ; X86:       # %bb.0:
    109 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    110 ; X86-NEXT:    movswl %ax, %ecx
    111 ; X86-NEXT:    movl %ecx, %edx
    112 ; X86-NEXT:    sarl $31, %edx
    113 ; X86-NEXT:    xorl %eax, %ecx
    114 ; X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
    115 ; X86-NEXT:    orl %ecx, %edx
    116 ; X86-NEXT:    sete %al
    117 ; X86-NEXT:    retl
    118 ;
    119 ; X64-LABEL: shifts_eqcmp_i64_i16:
    120 ; X64:       # %bb.0:
    121 ; X64-NEXT:    movswq %di, %rax
    122 ; X64-NEXT:    cmpq %rdi, %rax
    123 ; X64-NEXT:    sete %al
    124 ; X64-NEXT:    retq
    125   %tmp0 = shl i64 %x, 48 ; 64-16
    126   %tmp1 = ashr exact i64 %tmp0, 48 ; 64-16
    127   %tmp2 = icmp eq i64 %tmp1, %x
    128   ret i1 %tmp2
    129 }
    130 
    131 define i1 @shifts_eqcmp_i64_i8(i64 %x) nounwind {
        ; Positive: sext(trunc i64->i8) == x; X64 uses movsbq+cmpq, X86 expands over the 32-bit register pair.
    132 ; X86-LABEL: shifts_eqcmp_i64_i8:
    133 ; X86:       # %bb.0:
    134 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    135 ; X86-NEXT:    movsbl %al, %ecx
    136 ; X86-NEXT:    movl %ecx, %edx
    137 ; X86-NEXT:    sarl $31, %edx
    138 ; X86-NEXT:    xorl %eax, %ecx
    139 ; X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
    140 ; X86-NEXT:    orl %ecx, %edx
    141 ; X86-NEXT:    sete %al
    142 ; X86-NEXT:    retl
    143 ;
    144 ; X64-LABEL: shifts_eqcmp_i64_i8:
    145 ; X64:       # %bb.0:
    146 ; X64-NEXT:    movsbq %dil, %rax
    147 ; X64-NEXT:    cmpq %rdi, %rax
    148 ; X64-NEXT:    sete %al
    149 ; X64-NEXT:    retq
    150   %tmp0 = shl i64 %x, 56 ; 64-8
    151   %tmp1 = ashr exact i64 %tmp0, 56 ; 64-8
    152   %tmp2 = icmp eq i64 %tmp1, %x
    153   ret i1 %tmp2
    154 }
    155 
    156 ; ---------------------------------------------------------------------------- ;
    157 ; add + icmp uge
    158 ; ---------------------------------------------------------------------------- ;
    159 
    160 define i1 @add_ugecmp_i16_i8(i16 %x) nounwind {
        ; uge form, i16/i8: currently NOT folded to the sign-extend form; stays as add+zext+cmp.
    161 ; X86-LABEL: add_ugecmp_i16_i8:
    162 ; X86:       # %bb.0:
    163 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    164 ; X86-NEXT:    addl $-128, %eax
    165 ; X86-NEXT:    movzwl %ax, %eax
    166 ; X86-NEXT:    cmpl $65279, %eax # imm = 0xFEFF
    167 ; X86-NEXT:    seta %al
    168 ; X86-NEXT:    retl
    169 ;
    170 ; X64-LABEL: add_ugecmp_i16_i8:
    171 ; X64:       # %bb.0:
    172 ; X64-NEXT:    addl $-128, %edi
    173 ; X64-NEXT:    movzwl %di, %eax
    174 ; X64-NEXT:    cmpl $65279, %eax # imm = 0xFEFF
    175 ; X64-NEXT:    seta %al
    176 ; X64-NEXT:    retq
    177   %tmp0 = add i16 %x, -128 ; ~0U << (8-1)
    178   %tmp1 = icmp uge i16 %tmp0, -256 ; ~0U << 8
    179   ret i1 %tmp1
    180 }
    181 
    182 define i1 @add_ugecmp_i32_i16(i32 %x) nounwind {
        ; uge form, i32/i16: currently NOT folded; stays as add+cmp.
    183 ; X86-LABEL: add_ugecmp_i32_i16:
    184 ; X86:       # %bb.0:
    185 ; X86-NEXT:    movl $-32768, %eax # imm = 0x8000
    186 ; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
    187 ; X86-NEXT:    cmpl $-65537, %eax # imm = 0xFFFEFFFF
    188 ; X86-NEXT:    seta %al
    189 ; X86-NEXT:    retl
    190 ;
    191 ; X64-LABEL: add_ugecmp_i32_i16:
    192 ; X64:       # %bb.0:
    193 ; X64-NEXT:    addl $-32768, %edi # imm = 0x8000
    194 ; X64-NEXT:    cmpl $-65537, %edi # imm = 0xFFFEFFFF
    195 ; X64-NEXT:    seta %al
    196 ; X64-NEXT:    retq
    197   %tmp0 = add i32 %x, -32768 ; ~0U << (16-1)
    198   %tmp1 = icmp uge i32 %tmp0, -65536 ; ~0U << 16
    199   ret i1 %tmp1
    200 }
    201 
    202 define i1 @add_ugecmp_i32_i8(i32 %x) nounwind {
        ; uge form, i32/i8: currently NOT folded; stays as add+cmp.
    203 ; X86-LABEL: add_ugecmp_i32_i8:
    204 ; X86:       # %bb.0:
    205 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    206 ; X86-NEXT:    addl $-128, %eax
    207 ; X86-NEXT:    cmpl $-257, %eax # imm = 0xFEFF
    208 ; X86-NEXT:    seta %al
    209 ; X86-NEXT:    retl
    210 ;
    211 ; X64-LABEL: add_ugecmp_i32_i8:
    212 ; X64:       # %bb.0:
    213 ; X64-NEXT:    addl $-128, %edi
    214 ; X64-NEXT:    cmpl $-257, %edi # imm = 0xFEFF
    215 ; X64-NEXT:    seta %al
    216 ; X64-NEXT:    retq
    217   %tmp0 = add i32 %x, -128 ; ~0U << (8-1)
    218   %tmp1 = icmp uge i32 %tmp0, -256 ; ~0U << 8
    219   ret i1 %tmp1
    220 }
    221 
    222 define i1 @add_ugecmp_i64_i32(i64 %x) nounwind {
        ; uge form, i64/i32: currently NOT folded; X86 needs an add/adc carry chain.
    223 ; X86-LABEL: add_ugecmp_i64_i32:
    224 ; X86:       # %bb.0:
    225 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    226 ; X86-NEXT:    movl $-2147483648, %ecx # imm = 0x80000000
    227 ; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
    228 ; X86-NEXT:    adcl $-1, %eax
    229 ; X86-NEXT:    cmpl $-1, %eax
    230 ; X86-NEXT:    sete %al
    231 ; X86-NEXT:    retl
    232 ;
    233 ; X64-LABEL: add_ugecmp_i64_i32:
    234 ; X64:       # %bb.0:
    235 ; X64-NEXT:    addq $-2147483648, %rdi # imm = 0x80000000
    236 ; X64-NEXT:    movabsq $-4294967297, %rax # imm = 0xFFFFFFFEFFFFFFFF
    237 ; X64-NEXT:    cmpq %rax, %rdi
    238 ; X64-NEXT:    seta %al
    239 ; X64-NEXT:    retq
    240   %tmp0 = add i64 %x, -2147483648 ; ~0U << (32-1)
    241   %tmp1 = icmp uge i64 %tmp0, -4294967296 ; ~0U << 32
    242   ret i1 %tmp1
    243 }
    244 
    245 define i1 @add_ugecmp_i64_i16(i64 %x) nounwind {
        ; uge form, i64/i16: currently NOT folded; X86 does a two-word unsigned compare via cmp/sbb.
    246 ; X86-LABEL: add_ugecmp_i64_i16:
    247 ; X86:       # %bb.0:
    248 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    249 ; X86-NEXT:    movl $-32768, %ecx # imm = 0x8000
    250 ; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
    251 ; X86-NEXT:    adcl $-1, %eax
    252 ; X86-NEXT:    movl $-65537, %edx # imm = 0xFFFEFFFF
    253 ; X86-NEXT:    cmpl %ecx, %edx
    254 ; X86-NEXT:    movl $-1, %ecx
    255 ; X86-NEXT:    sbbl %eax, %ecx
    256 ; X86-NEXT:    setb %al
    257 ; X86-NEXT:    retl
    258 ;
    259 ; X64-LABEL: add_ugecmp_i64_i16:
    260 ; X64:       # %bb.0:
    261 ; X64-NEXT:    addq $-32768, %rdi # imm = 0x8000
    262 ; X64-NEXT:    cmpq $-65537, %rdi # imm = 0xFFFEFFFF
    263 ; X64-NEXT:    seta %al
    264 ; X64-NEXT:    retq
    265   %tmp0 = add i64 %x, -32768 ; ~0U << (16-1)
    266   %tmp1 = icmp uge i64 %tmp0, -65536 ; ~0U << 16
    267   ret i1 %tmp1
    268 }
    269 
    270 define i1 @add_ugecmp_i64_i8(i64 %x) nounwind {
        ; uge form, i64/i8: currently NOT folded; X86 does a two-word unsigned compare via cmp/sbb.
    271 ; X86-LABEL: add_ugecmp_i64_i8:
    272 ; X86:       # %bb.0:
    273 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    274 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
    275 ; X86-NEXT:    addl $-128, %eax
    276 ; X86-NEXT:    adcl $-1, %ecx
    277 ; X86-NEXT:    movl $-257, %edx # imm = 0xFEFF
    278 ; X86-NEXT:    cmpl %eax, %edx
    279 ; X86-NEXT:    movl $-1, %eax
    280 ; X86-NEXT:    sbbl %ecx, %eax
    281 ; X86-NEXT:    setb %al
    282 ; X86-NEXT:    retl
    283 ;
    284 ; X64-LABEL: add_ugecmp_i64_i8:
    285 ; X64:       # %bb.0:
    286 ; X64-NEXT:    addq $-128, %rdi
    287 ; X64-NEXT:    cmpq $-257, %rdi # imm = 0xFEFF
    288 ; X64-NEXT:    seta %al
    289 ; X64-NEXT:    retq
    290   %tmp0 = add i64 %x, -128 ; ~0U << (8-1)
    291   %tmp1 = icmp uge i64 %tmp0, -256 ; ~0U << 8
    292   ret i1 %tmp1
    293 }
    294 
    295 ; ---------------------------------------------------------------------------- ;
    296 ; add + icmp ult
    297 ; ---------------------------------------------------------------------------- ;
    298 
    299 define i1 @add_ultcmp_i16_i8(i16 %x) nounwind {
        ; ult form, i16/i8: (x + 128) ult 256 folds to the sign-extend+compare form (movsbl+cmpw).
    300 ; X86-LABEL: add_ultcmp_i16_i8:
    301 ; X86:       # %bb.0:
    302 ; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
    303 ; X86-NEXT:    movsbl %al, %ecx
    304 ; X86-NEXT:    cmpw %ax, %cx
    305 ; X86-NEXT:    sete %al
    306 ; X86-NEXT:    retl
    307 ;
    308 ; X64-LABEL: add_ultcmp_i16_i8:
    309 ; X64:       # %bb.0:
    310 ; X64-NEXT:    movsbl %dil, %eax
    311 ; X64-NEXT:    cmpw %di, %ax
    312 ; X64-NEXT:    sete %al
    313 ; X64-NEXT:    retq
    314   %tmp0 = add i16 %x, 128 ; 1U << (8-1)
    315   %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
    316   ret i1 %tmp1
    317 }
    318 
    319 define i1 @add_ultcmp_i32_i16(i32 %x) nounwind {
        ; ult form, i32/i16: (x + 32768) ult 65536 folds to movswl+cmpl.
    320 ; X86-LABEL: add_ultcmp_i32_i16:
    321 ; X86:       # %bb.0:
    322 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    323 ; X86-NEXT:    movswl %ax, %ecx
    324 ; X86-NEXT:    cmpl %eax, %ecx
    325 ; X86-NEXT:    sete %al
    326 ; X86-NEXT:    retl
    327 ;
    328 ; X64-LABEL: add_ultcmp_i32_i16:
    329 ; X64:       # %bb.0:
    330 ; X64-NEXT:    movswl %di, %eax
    331 ; X64-NEXT:    cmpl %edi, %eax
    332 ; X64-NEXT:    sete %al
    333 ; X64-NEXT:    retq
    334   %tmp0 = add i32 %x, 32768 ; 1U << (16-1)
    335   %tmp1 = icmp ult i32 %tmp0, 65536 ; 1U << 16
    336   ret i1 %tmp1
    337 }
    338 
    339 define i1 @add_ultcmp_i32_i8(i32 %x) nounwind {
        ; ult form, i32/i8: (x + 128) ult 256 folds to movsbl+cmpl.
    340 ; X86-LABEL: add_ultcmp_i32_i8:
    341 ; X86:       # %bb.0:
    342 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    343 ; X86-NEXT:    movsbl %al, %ecx
    344 ; X86-NEXT:    cmpl %eax, %ecx
    345 ; X86-NEXT:    sete %al
    346 ; X86-NEXT:    retl
    347 ;
    348 ; X64-LABEL: add_ultcmp_i32_i8:
    349 ; X64:       # %bb.0:
    350 ; X64-NEXT:    movsbl %dil, %eax
    351 ; X64-NEXT:    cmpl %edi, %eax
    352 ; X64-NEXT:    sete %al
    353 ; X64-NEXT:    retq
    354   %tmp0 = add i32 %x, 128 ; 1U << (8-1)
    355   %tmp1 = icmp ult i32 %tmp0, 256 ; 1U << 8
    356   ret i1 %tmp1
    357 }
    358 
    359 define i1 @add_ultcmp_i64_i32(i64 %x) nounwind {
        ; ult form, i64/i32: folds to movslq+cmpq on X64, sign-bit xor on X86.
    360 ; X86-LABEL: add_ultcmp_i64_i32:
    361 ; X86:       # %bb.0:
    362 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    363 ; X86-NEXT:    sarl $31, %eax
    364 ; X86-NEXT:    xorl {{[0-9]+}}(%esp), %eax
    365 ; X86-NEXT:    sete %al
    366 ; X86-NEXT:    retl
    367 ;
    368 ; X64-LABEL: add_ultcmp_i64_i32:
    369 ; X64:       # %bb.0:
    370 ; X64-NEXT:    movslq %edi, %rax
    371 ; X64-NEXT:    cmpq %rdi, %rax
    372 ; X64-NEXT:    sete %al
    373 ; X64-NEXT:    retq
    374   %tmp0 = add i64 %x, 2147483648 ; 1U << (32-1)
    375   %tmp1 = icmp ult i64 %tmp0, 4294967296 ; 1U << 32
    376   ret i1 %tmp1
    377 }
    378 
    379 define i1 @add_ultcmp_i64_i16(i64 %x) nounwind {
        ; ult form, i64/i16: folds to movswq+cmpq on X64; X86 expands over the register pair.
    380 ; X86-LABEL: add_ultcmp_i64_i16:
    381 ; X86:       # %bb.0:
    382 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    383 ; X86-NEXT:    movswl %ax, %ecx
    384 ; X86-NEXT:    xorl %ecx, %eax
    385 ; X86-NEXT:    sarl $31, %ecx
    386 ; X86-NEXT:    xorl {{[0-9]+}}(%esp), %ecx
    387 ; X86-NEXT:    orl %eax, %ecx
    388 ; X86-NEXT:    sete %al
    389 ; X86-NEXT:    retl
    390 ;
    391 ; X64-LABEL: add_ultcmp_i64_i16:
    392 ; X64:       # %bb.0:
    393 ; X64-NEXT:    movswq %di, %rax
    394 ; X64-NEXT:    cmpq %rdi, %rax
    395 ; X64-NEXT:    sete %al
    396 ; X64-NEXT:    retq
    397   %tmp0 = add i64 %x, 32768 ; 1U << (16-1)
    398   %tmp1 = icmp ult i64 %tmp0, 65536 ; 1U << 16
    399   ret i1 %tmp1
    400 }
    401 
    402 define i1 @add_ultcmp_i64_i8(i64 %x) nounwind {
        ; ult form, i64/i8: folds to movsbq+cmpq on X64; X86 expands over the register pair.
    403 ; X86-LABEL: add_ultcmp_i64_i8:
    404 ; X86:       # %bb.0:
    405 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    406 ; X86-NEXT:    movsbl %al, %ecx
    407 ; X86-NEXT:    xorl %ecx, %eax
    408 ; X86-NEXT:    sarl $31, %ecx
    409 ; X86-NEXT:    xorl {{[0-9]+}}(%esp), %ecx
    410 ; X86-NEXT:    orl %eax, %ecx
    411 ; X86-NEXT:    sete %al
    412 ; X86-NEXT:    retl
    413 ;
    414 ; X64-LABEL: add_ultcmp_i64_i8:
    415 ; X64:       # %bb.0:
    416 ; X64-NEXT:    movsbq %dil, %rax
    417 ; X64-NEXT:    cmpq %rdi, %rax
    418 ; X64-NEXT:    sete %al
    419 ; X64-NEXT:    retq
    420   %tmp0 = add i64 %x, 128 ; 1U << (8-1)
    421   %tmp1 = icmp ult i64 %tmp0, 256 ; 1U << 8
    422   ret i1 %tmp1
    423 }
    424 
    425 ; Slightly more canonical variant
    426 define i1 @add_ulecmp_i16_i8(i16 %x) nounwind {
        ; ule 255 is equivalent to ult 256, so this folds exactly like add_ultcmp_i16_i8.
    427 ; X86-LABEL: add_ulecmp_i16_i8:
    428 ; X86:       # %bb.0:
    429 ; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
    430 ; X86-NEXT:    movsbl %al, %ecx
    431 ; X86-NEXT:    cmpw %ax, %cx
    432 ; X86-NEXT:    sete %al
    433 ; X86-NEXT:    retl
    434 ;
    435 ; X64-LABEL: add_ulecmp_i16_i8:
    436 ; X64:       # %bb.0:
    437 ; X64-NEXT:    movsbl %dil, %eax
    438 ; X64-NEXT:    cmpw %di, %ax
    439 ; X64-NEXT:    sete %al
    440 ; X64-NEXT:    retq
    441   %tmp0 = add i16 %x, 128 ; 1U << (8-1)
    442   %tmp1 = icmp ule i16 %tmp0, 255 ; (1U << 8) - 1
    443   ret i1 %tmp1
    444 }
    445 
    446 ; Negative tests
    447 ; ---------------------------------------------------------------------------- ;
    448 
    449 ; Adding not a constant
    450 define i1 @add_ultcmp_bad_i16_i8_add(i16 %x, i16 %y) nounwind {
        ; Negative: the addend is a variable, so the fold must not fire; plain add+cmp expected.
    451 ; X86-LABEL: add_ultcmp_bad_i16_i8_add:
    452 ; X86:       # %bb.0:
    453 ; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
    454 ; X86-NEXT:    addw {{[0-9]+}}(%esp), %ax
    455 ; X86-NEXT:    movzwl %ax, %eax
    456 ; X86-NEXT:    cmpl $256, %eax # imm = 0x100
    457 ; X86-NEXT:    setb %al
    458 ; X86-NEXT:    retl
    459 ;
    460 ; X64-LABEL: add_ultcmp_bad_i16_i8_add:
    461 ; X64:       # %bb.0:
    462 ; X64-NEXT:    addl %esi, %edi
    463 ; X64-NEXT:    movzwl %di, %eax
    464 ; X64-NEXT:    cmpl $256, %eax # imm = 0x100
    465 ; X64-NEXT:    setb %al
    466 ; X64-NEXT:    retq
    467   %tmp0 = add i16 %x, %y
    468   %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
    469   ret i1 %tmp1
    470 }
    471 
    472 ; Comparing not with a constant
    473 define i1 @add_ultcmp_bad_i16_i8_cmp(i16 %x, i16 %y) nounwind {
        ; Negative: the compare RHS is a variable, so the fold must not fire.
    474 ; X86-LABEL: add_ultcmp_bad_i16_i8_cmp:
    475 ; X86:       # %bb.0:
    476 ; X86-NEXT:    movl $128, %eax
    477 ; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
    478 ; X86-NEXT:    cmpw {{[0-9]+}}(%esp), %ax
    479 ; X86-NEXT:    setb %al
    480 ; X86-NEXT:    retl
    481 ;
    482 ; X64-LABEL: add_ultcmp_bad_i16_i8_cmp:
    483 ; X64:       # %bb.0:
    484 ; X64-NEXT:    subl $-128, %edi
    485 ; X64-NEXT:    cmpw %si, %di
    486 ; X64-NEXT:    setb %al
    487 ; X64-NEXT:    retq
    488   %tmp0 = add i16 %x, 128 ; 1U << (8-1)
    489   %tmp1 = icmp ult i16 %tmp0, %y
    490   ret i1 %tmp1
    491 }
    492 
    493 ; Second constant is not larger than the first one
    494 define i1 @add_ultcmp_bad_i8_i16(i16 %x) nounwind {
        ; Negative: the cmp constant (128) is not larger than the add constant, so no fold.
    495 ; X86-LABEL: add_ultcmp_bad_i8_i16:
    496 ; X86:       # %bb.0:
    497 ; X86-NEXT:    movw $128, %ax
    498 ; X86-NEXT:    addw {{[0-9]+}}(%esp), %ax
    499 ; X86-NEXT:    setb %al
    500 ; X86-NEXT:    retl
    501 ;
    502 ; X64-LABEL: add_ultcmp_bad_i8_i16:
    503 ; X64:       # %bb.0:
    504 ; X64-NEXT:    addw $128, %di
    505 ; X64-NEXT:    setb %al
    506 ; X64-NEXT:    retq
    507   %tmp0 = add i16 %x, 128 ; 1U << (8-1)
    508   %tmp1 = icmp ult i16 %tmp0, 128 ; 1U << (8-1)
    509   ret i1 %tmp1
    510 }
    511 
    512 ; First constant is not power of two
    513 define i1 @add_ultcmp_bad_i16_i8_c0notpoweroftwo(i16 %x) nounwind {
        ; Negative: the add constant 192 is not a power of two, so the fold must not fire.
    514 ; X86-LABEL: add_ultcmp_bad_i16_i8_c0notpoweroftwo:
    515 ; X86:       # %bb.0:
    516 ; X86-NEXT:    movl $192, %eax
    517 ; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
    518 ; X86-NEXT:    movzwl %ax, %eax
    519 ; X86-NEXT:    cmpl $256, %eax # imm = 0x100
    520 ; X86-NEXT:    setb %al
    521 ; X86-NEXT:    retl
    522 ;
    523 ; X64-LABEL: add_ultcmp_bad_i16_i8_c0notpoweroftwo:
    524 ; X64:       # %bb.0:
    525 ; X64-NEXT:    addl $192, %edi
    526 ; X64-NEXT:    movzwl %di, %eax
    527 ; X64-NEXT:    cmpl $256, %eax # imm = 0x100
    528 ; X64-NEXT:    setb %al
    529 ; X64-NEXT:    retq
    530   %tmp0 = add i16 %x, 192 ; (1U << (8-1)) + (1U << (8-1-1))
    531   %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
    532   ret i1 %tmp1
    533 }
    534 
    535 ; Second constant is not power of two
    536 define i1 @add_ultcmp_bad_i16_i8_c1notpoweroftwo(i16 %x) nounwind {
        ; Negative: the cmp constant 768 is not a power of two, so the fold must not fire.
    537 ; X86-LABEL: add_ultcmp_bad_i16_i8_c1notpoweroftwo:
    538 ; X86:       # %bb.0:
    539 ; X86-NEXT:    movl $128, %eax
    540 ; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
    541 ; X86-NEXT:    movzwl %ax, %eax
    542 ; X86-NEXT:    cmpl $768, %eax # imm = 0x300
    543 ; X86-NEXT:    setb %al
    544 ; X86-NEXT:    retl
    545 ;
    546 ; X64-LABEL: add_ultcmp_bad_i16_i8_c1notpoweroftwo:
    547 ; X64:       # %bb.0:
    548 ; X64-NEXT:    subl $-128, %edi
    549 ; X64-NEXT:    movzwl %di, %eax
    550 ; X64-NEXT:    cmpl $768, %eax # imm = 0x300
    551 ; X64-NEXT:    setb %al
    552 ; X64-NEXT:    retq
    553   %tmp0 = add i16 %x, 128 ; 1U << (8-1)
    554   %tmp1 = icmp ult i16 %tmp0, 768 ; (1U << 8) + (1U << (8+1))
    555   ret i1 %tmp1
    556 }
    557 
    558 ; Magic check fails, 64 << 1 != 256
    559 define i1 @add_ultcmp_bad_i16_i8_magic(i16 %x) nounwind {
        ; Negative: the add constant (64) is not half of the cmp constant (256), so no fold.
    560 ; X86-LABEL: add_ultcmp_bad_i16_i8_magic:
    561 ; X86:       # %bb.0:
    562 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    563 ; X86-NEXT:    addl $64, %eax
    564 ; X86-NEXT:    movzwl %ax, %eax
    565 ; X86-NEXT:    cmpl $256, %eax # imm = 0x100
    566 ; X86-NEXT:    setb %al
    567 ; X86-NEXT:    retl
    568 ;
    569 ; X64-LABEL: add_ultcmp_bad_i16_i8_magic:
    570 ; X64:       # %bb.0:
    571 ; X64-NEXT:    addl $64, %edi
    572 ; X64-NEXT:    movzwl %di, %eax
    573 ; X64-NEXT:    cmpl $256, %eax # imm = 0x100
    574 ; X64-NEXT:    setb %al
    575 ; X64-NEXT:    retq
    576   %tmp0 = add i16 %x, 64 ; 1U << (8-1-1)
    577   %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
    578   ret i1 %tmp1
    579 }
    580 
    581 ; Bad 'destination type'
    582 define i1 @add_ultcmp_bad_i16_i4(i16 %x) nounwind {
        ; Negative: i4 "destination" width has no matching x86 sign-extend; left as add+cmp.
    583 ; X86-LABEL: add_ultcmp_bad_i16_i4:
    584 ; X86:       # %bb.0:
    585 ; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
    586 ; X86-NEXT:    addl $8, %eax
    587 ; X86-NEXT:    movzwl %ax, %eax
    588 ; X86-NEXT:    cmpl $16, %eax
    589 ; X86-NEXT:    setb %al
    590 ; X86-NEXT:    retl
    591 ;
    592 ; X64-LABEL: add_ultcmp_bad_i16_i4:
    593 ; X64:       # %bb.0:
    594 ; X64-NEXT:    addl $8, %edi
    595 ; X64-NEXT:    movzwl %di, %eax
    596 ; X64-NEXT:    cmpl $16, %eax
    597 ; X64-NEXT:    setb %al
    598 ; X64-NEXT:    retq
    599   %tmp0 = add i16 %x, 8 ; 1U << (4-1)
    600   %tmp1 = icmp ult i16 %tmp0, 16 ; 1U << 4
    601   ret i1 %tmp1
    602 }
    603 
    604 ; Bad storage type
    605 define i1 @add_ultcmp_bad_i24_i8(i24 %x) nounwind {
        ; Negative: illegal i24 storage type; lowered as add + mask-to-24-bits + cmp.
    606 ; X86-LABEL: add_ultcmp_bad_i24_i8:
    607 ; X86:       # %bb.0:
    608 ; X86-NEXT:    movl $128, %eax
    609 ; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
    610 ; X86-NEXT:    andl $16777215, %eax # imm = 0xFFFFFF
    611 ; X86-NEXT:    cmpl $256, %eax # imm = 0x100
    612 ; X86-NEXT:    setb %al
    613 ; X86-NEXT:    retl
    614 ;
    615 ; X64-LABEL: add_ultcmp_bad_i24_i8:
    616 ; X64:       # %bb.0:
    617 ; X64-NEXT:    subl $-128, %edi
    618 ; X64-NEXT:    andl $16777215, %edi # imm = 0xFFFFFF
    619 ; X64-NEXT:    cmpl $256, %edi # imm = 0x100
    620 ; X64-NEXT:    setb %al
    621 ; X64-NEXT:    retq
    622   %tmp0 = add i24 %x, 128 ; 1U << (8-1)
    623   %tmp1 = icmp ult i24 %tmp0, 256 ; 1U << 8
    624   ret i1 %tmp1
    625 }
    626 
    627 define i1 @add_ulecmp_bad_i16_i8(i16 %x) nounwind {
        ; Degenerate: ule -1 is always true, so the whole function folds to a constant 'true' (movb $1).
    628 ; CHECK-LABEL: add_ulecmp_bad_i16_i8:
    629 ; CHECK:       # %bb.0:
    630 ; CHECK-NEXT:    movb $1, %al
    631 ; CHECK-NEXT:    ret{{[l|q]}}
    632   %tmp0 = add i16 %x, 128 ; 1U << (8-1)
    633   %tmp1 = icmp ule i16 %tmp0, -1 ; when we +1 it, it will wrap to 0
    634   ret i1 %tmp1
    635 }
    636