; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=i686-unknown-linux-gnu < %s | FileCheck %s --check-prefixes=CHECK,X86,NOBMI2,X86-NOBMI2,FALLBACK0,X86-FALLBACK0
; RUN: llc -mtriple=x86_64-unknown-linux-gnu < %s | FileCheck %s --check-prefixes=CHECK,X64,NOBMI2,X64-NOBMI2,FALLBACK0,X64-FALLBACK0

; https://bugs.llvm.org/show_bug.cgi?id=38149

; We are truncating from wider width, and then sign-extending
; back to the original width. Then we inequality-comparing orig and src.
; If they don't match, then we had signed truncation during truncation.

; This can be expressed in a several ways in IR:
;   trunc + sext + icmp ne <- not canonical
;   shl + ashr + icmp ne
;   add + icmp ult
;   add + icmp uge/ugt
; However only the simplest form (with two shifts) gets lowered best.

; ---------------------------------------------------------------------------- ;
; shl + ashr + icmp ne
; ---------------------------------------------------------------------------- ;

define i1 @shifts_necmp_i16_i8(i16 %x) nounwind {
; X86-LABEL: shifts_necmp_i16_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movsbl %al, %ecx
; X86-NEXT:    cmpw %ax, %cx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: shifts_necmp_i16_i8:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    cmpw %di, %ax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = shl i16 %x, 8 ; 16-8
  %tmp1 = ashr exact i16 %tmp0, 8 ; 16-8
  %tmp2 = icmp ne i16 %tmp1, %x
  ret i1 %tmp2
}

define i1 @shifts_necmp_i32_i16(i32 %x) nounwind {
; X86-LABEL: shifts_necmp_i32_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movswl %ax, %ecx
; X86-NEXT:    cmpl %eax, %ecx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: shifts_necmp_i32_i16:
; X64:       # %bb.0:
; X64-NEXT:    movswl %di, %eax
; X64-NEXT:    cmpl %edi, %eax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = shl i32 %x, 16 ; 32-16
  %tmp1 = ashr exact i32 %tmp0, 16 ; 32-16
  %tmp2 = icmp ne i32 %tmp1, %x
  ret i1 %tmp2
}

define i1 @shifts_necmp_i32_i8(i32 %x) nounwind {
; X86-LABEL: shifts_necmp_i32_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movsbl %al, %ecx
; X86-NEXT:    cmpl %eax, %ecx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: shifts_necmp_i32_i8:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    cmpl %edi, %eax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = shl i32 %x, 24 ; 32-8
  %tmp1 = ashr exact i32 %tmp0, 24 ; 32-8
  %tmp2 = icmp ne i32 %tmp1, %x
  ret i1 %tmp2
}

define i1 @shifts_necmp_i64_i32(i64 %x) nounwind {
; X86-LABEL: shifts_necmp_i64_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    sarl $31, %eax
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: shifts_necmp_i64_i32:
; X64:       # %bb.0:
; X64-NEXT:    movslq %edi, %rax
; X64-NEXT:    cmpq %rdi, %rax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = shl i64 %x, 32 ; 64-32
  %tmp1 = ashr exact i64 %tmp0, 32 ; 64-32
  %tmp2 = icmp ne i64 %tmp1, %x
  ret i1 %tmp2
}

define i1 @shifts_necmp_i64_i16(i64 %x) nounwind {
; X86-LABEL: shifts_necmp_i64_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movswl %ax, %ecx
; X86-NEXT:    movl %ecx, %edx
; X86-NEXT:    sarl $31, %edx
; X86-NEXT:    xorl %eax, %ecx
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    orl %ecx, %edx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: shifts_necmp_i64_i16:
; X64:       # %bb.0:
; X64-NEXT:    movswq %di, %rax
; X64-NEXT:    cmpq %rdi, %rax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = shl i64 %x, 48 ; 64-16
  %tmp1 = ashr exact i64 %tmp0, 48 ; 64-16
  %tmp2 = icmp ne i64 %tmp1, %x
  ret i1 %tmp2
}

define i1 @shifts_necmp_i64_i8(i64 %x) nounwind {
; X86-LABEL: shifts_necmp_i64_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movsbl %al, %ecx
; X86-NEXT:    movl %ecx, %edx
; X86-NEXT:    sarl $31, %edx
; X86-NEXT:    xorl %eax, %ecx
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    orl %ecx, %edx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: shifts_necmp_i64_i8:
; X64:       # %bb.0:
; X64-NEXT:    movsbq %dil, %rax
; X64-NEXT:    cmpq %rdi, %rax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = shl i64 %x, 56 ; 64-8
  %tmp1 = ashr exact i64 %tmp0, 56 ; 64-8
  %tmp2 = icmp ne i64 %tmp1, %x
  ret i1 %tmp2
}

; ---------------------------------------------------------------------------- ;
; add + icmp ult
; ---------------------------------------------------------------------------- ;

define i1 @add_ultcmp_i16_i8(i16 %x) nounwind {
; X86-LABEL: add_ultcmp_i16_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl $-128, %eax
; X86-NEXT:    movzwl %ax, %eax
; X86-NEXT:    cmpl $65280, %eax # imm = 0xFF00
; X86-NEXT:    setb %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ultcmp_i16_i8:
; X64:       # %bb.0:
; X64-NEXT:    addl $-128, %edi
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    cmpl $65280, %eax # imm = 0xFF00
; X64-NEXT:    setb %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ult i16 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

define i1 @add_ultcmp_i32_i16(i32 %x) nounwind {
; X86-LABEL: add_ultcmp_i32_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl $-32768, %eax # imm = 0x8000
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    cmpl $-65536, %eax # imm = 0xFFFF0000
; X86-NEXT:    setb %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ultcmp_i32_i16:
; X64:       # %bb.0:
; X64-NEXT:    addl $-32768, %edi # imm = 0x8000
; X64-NEXT:    cmpl $-65536, %edi # imm = 0xFFFF0000
; X64-NEXT:    setb %al
; X64-NEXT:    retq
  %tmp0 = add i32 %x, -32768 ; ~0U << (16-1)
  %tmp1 = icmp ult i32 %tmp0, -65536 ; ~0U << 16
  ret i1 %tmp1
}

define i1 @add_ultcmp_i32_i8(i32 %x) nounwind {
; X86-LABEL: add_ultcmp_i32_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl $-128, %eax
; X86-NEXT:    cmpl $-256, %eax
; X86-NEXT:    setb %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ultcmp_i32_i8:
; X64:       # %bb.0:
; X64-NEXT:    addl $-128, %edi
; X64-NEXT:    cmpl $-256, %edi
; X64-NEXT:    setb %al
; X64-NEXT:    retq
  %tmp0 = add i32 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ult i32 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

define i1 @add_ultcmp_i64_i32(i64 %x) nounwind {
; X86-LABEL: add_ultcmp_i64_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl $-2147483648, %ecx # imm = 0x80000000
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    adcl $-1, %eax
; X86-NEXT:    cmpl $-1, %eax
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ultcmp_i64_i32:
; X64:       # %bb.0:
; X64-NEXT:    addq $-2147483648, %rdi # imm = 0x80000000
; X64-NEXT:    movabsq $-4294967296, %rax # imm = 0xFFFFFFFF00000000
; X64-NEXT:    cmpq %rax, %rdi
; X64-NEXT:    setb %al
; X64-NEXT:    retq
  %tmp0 = add i64 %x, -2147483648 ; ~0U << (32-1)
  %tmp1 = icmp ult i64 %tmp0, -4294967296 ; ~0U << 32
  ret i1 %tmp1
}

define i1 @add_ultcmp_i64_i16(i64 %x) nounwind {
; X86-LABEL: add_ultcmp_i64_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl $-32768, %ecx # imm = 0x8000
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    adcl $-1, %eax
; X86-NEXT:    cmpl $-65536, %ecx # imm = 0xFFFF0000
; X86-NEXT:    sbbl $-1, %eax
; X86-NEXT:    setb %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ultcmp_i64_i16:
; X64:       # %bb.0:
; X64-NEXT:    addq $-32768, %rdi # imm = 0x8000
; X64-NEXT:    cmpq $-65536, %rdi # imm = 0xFFFF0000
; X64-NEXT:    setb %al
; X64-NEXT:    retq
  %tmp0 = add i64 %x, -32768 ; ~0U << (16-1)
  %tmp1 = icmp ult i64 %tmp0, -65536 ; ~0U << 16
  ret i1 %tmp1
}

define i1 @add_ultcmp_i64_i8(i64 %x) nounwind {
; X86-LABEL: add_ultcmp_i64_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl $-128, %eax
; X86-NEXT:    adcl $-1, %ecx
; X86-NEXT:    cmpl $-256, %eax
; X86-NEXT:    sbbl $-1, %ecx
; X86-NEXT:    setb %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ultcmp_i64_i8:
; X64:       # %bb.0:
; X64-NEXT:    addq $-128, %rdi
; X64-NEXT:    cmpq $-256, %rdi
; X64-NEXT:    setb %al
; X64-NEXT:    retq
  %tmp0 = add i64 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ult i64 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; ---------------------------------------------------------------------------- ;
; add + icmp uge
; ---------------------------------------------------------------------------- ;

define i1 @add_ugecmp_i16_i8(i16 %x) nounwind {
; X86-LABEL: add_ugecmp_i16_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movsbl %al, %ecx
; X86-NEXT:    cmpw %ax, %cx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_i16_i8:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    cmpw %di, %ax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

define i1 @add_ugecmp_i32_i16(i32 %x) nounwind {
; X86-LABEL: add_ugecmp_i32_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movswl %ax, %ecx
; X86-NEXT:    cmpl %eax, %ecx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_i32_i16:
; X64:       # %bb.0:
; X64-NEXT:    movswl %di, %eax
; X64-NEXT:    cmpl %edi, %eax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = add i32 %x, 32768 ; 1U << (16-1)
  %tmp1 = icmp uge i32 %tmp0, 65536 ; 1U << 16
  ret i1 %tmp1
}

define i1 @add_ugecmp_i32_i8(i32 %x) nounwind {
; X86-LABEL: add_ugecmp_i32_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movsbl %al, %ecx
; X86-NEXT:    cmpl %eax, %ecx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_i32_i8:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    cmpl %edi, %eax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = add i32 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i32 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

define i1 @add_ugecmp_i64_i32(i64 %x) nounwind {
; X86-LABEL: add_ugecmp_i64_i32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    sarl $31, %eax
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_i64_i32:
; X64:       # %bb.0:
; X64-NEXT:    movslq %edi, %rax
; X64-NEXT:    cmpq %rdi, %rax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = add i64 %x, 2147483648 ; 1U << (32-1)
  %tmp1 = icmp uge i64 %tmp0, 4294967296 ; 1U << 32
  ret i1 %tmp1
}

define i1 @add_ugecmp_i64_i16(i64 %x) nounwind {
; X86-LABEL: add_ugecmp_i64_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movswl %ax, %ecx
; X86-NEXT:    xorl %ecx, %eax
; X86-NEXT:    sarl $31, %ecx
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    orl %eax, %ecx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_i64_i16:
; X64:       # %bb.0:
; X64-NEXT:    movswq %di, %rax
; X64-NEXT:    cmpq %rdi, %rax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = add i64 %x, 32768 ; 1U << (16-1)
  %tmp1 = icmp uge i64 %tmp0, 65536 ; 1U << 16
  ret i1 %tmp1
}

define i1 @add_ugecmp_i64_i8(i64 %x) nounwind {
; X86-LABEL: add_ugecmp_i64_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movsbl %al, %ecx
; X86-NEXT:    xorl %ecx, %eax
; X86-NEXT:    sarl $31, %ecx
; X86-NEXT:    xorl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    orl %eax, %ecx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_i64_i8:
; X64:       # %bb.0:
; X64-NEXT:    movsbq %dil, %rax
; X64-NEXT:    cmpq %rdi, %rax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = add i64 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i64 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Slightly more canonical variant
define i1 @add_ugtcmp_i16_i8(i16 %x) nounwind {
; X86-LABEL: add_ugtcmp_i16_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movsbl %al, %ecx
; X86-NEXT:    cmpw %ax, %cx
; X86-NEXT:    setne %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugtcmp_i16_i8:
; X64:       # %bb.0:
; X64-NEXT:    movsbl %dil, %eax
; X64-NEXT:    cmpw %di, %ax
; X64-NEXT:    setne %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ugt i16 %tmp0, 255 ; (1U << 8) - 1
  ret i1 %tmp1
}

; Negative tests
; ---------------------------------------------------------------------------- ;

; Adding not a constant
define i1 @add_ugecmp_bad_i16_i8_add(i16 %x, i16 %y) nounwind {
; X86-LABEL: add_ugecmp_bad_i16_i8_add:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addw {{[0-9]+}}(%esp), %ax
; X86-NEXT:    movzwl %ax, %eax
; X86-NEXT:    cmpl $255, %eax
; X86-NEXT:    seta %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i16_i8_add:
; X64:       # %bb.0:
; X64-NEXT:    addl %esi, %edi
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    cmpl $255, %eax
; X64-NEXT:    seta %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, %y
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Comparing not with a constant
define i1 @add_ugecmp_bad_i16_i8_cmp(i16 %x, i16 %y) nounwind {
; X86-LABEL: add_ugecmp_bad_i16_i8_cmp:
; X86:       # %bb.0:
; X86-NEXT:    movl $128, %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    cmpw {{[0-9]+}}(%esp), %ax
; X86-NEXT:    setae %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i16_i8_cmp:
; X64:       # %bb.0:
; X64-NEXT:    subl $-128, %edi
; X64-NEXT:    cmpw %si, %di
; X64-NEXT:    setae %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, %y
  ret i1 %tmp1
}

; Second constant is not larger than the first one
define i1 @add_ugecmp_bad_i8_i16(i16 %x) nounwind {
; X86-LABEL: add_ugecmp_bad_i8_i16:
; X86:       # %bb.0:
; X86-NEXT:    movl $128, %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movzwl %ax, %eax
; X86-NEXT:    cmpl $127, %eax
; X86-NEXT:    seta %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i8_i16:
; X64:       # %bb.0:
; X64-NEXT:    subl $-128, %edi
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    cmpl $127, %eax
; X64-NEXT:    seta %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, 128 ; 1U << (8-1)
  ret i1 %tmp1
}

; First constant is not power of two
define i1 @add_ugecmp_bad_i16_i8_c0notpoweroftwo(i16 %x) nounwind {
; X86-LABEL: add_ugecmp_bad_i16_i8_c0notpoweroftwo:
; X86:       # %bb.0:
; X86-NEXT:    movl $192, %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movzwl %ax, %eax
; X86-NEXT:    cmpl $255, %eax
; X86-NEXT:    seta %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i16_i8_c0notpoweroftwo:
; X64:       # %bb.0:
; X64-NEXT:    addl $192, %edi
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    cmpl $255, %eax
; X64-NEXT:    seta %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 192 ; (1U << (8-1)) + (1U << (8-1-1))
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Second constant is not power of two
define i1 @add_ugecmp_bad_i16_i8_c1notpoweroftwo(i16 %x) nounwind {
; X86-LABEL: add_ugecmp_bad_i16_i8_c1notpoweroftwo:
; X86:       # %bb.0:
; X86-NEXT:    movl $128, %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movzwl %ax, %eax
; X86-NEXT:    cmpl $767, %eax # imm = 0x2FF
; X86-NEXT:    seta %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i16_i8_c1notpoweroftwo:
; X64:       # %bb.0:
; X64-NEXT:    subl $-128, %edi
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    cmpl $767, %eax # imm = 0x2FF
; X64-NEXT:    seta %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, 768 ; (1U << 8)) + (1U << (8+1))
  ret i1 %tmp1
}

; Magic check fails, 64 << 1 != 256
define i1 @add_ugecmp_bad_i16_i8_magic(i16 %x) nounwind {
; X86-LABEL: add_ugecmp_bad_i16_i8_magic:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl $64, %eax
; X86-NEXT:    movzwl %ax, %eax
; X86-NEXT:    cmpl $255, %eax
; X86-NEXT:    seta %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i16_i8_magic:
; X64:       # %bb.0:
; X64-NEXT:    addl $64, %edi
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    cmpl $255, %eax
; X64-NEXT:    seta %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 64 ; 1U << (8-1-1)
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Bad 'destination type'
define i1 @add_ugecmp_bad_i16_i4(i16 %x) nounwind {
; X86-LABEL: add_ugecmp_bad_i16_i4:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl $8, %eax
; X86-NEXT:    movzwl %ax, %eax
; X86-NEXT:    cmpl $15, %eax
; X86-NEXT:    seta %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i16_i4:
; X64:       # %bb.0:
; X64-NEXT:    addl $8, %edi
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    cmpl $15, %eax
; X64-NEXT:    seta %al
; X64-NEXT:    retq
  %tmp0 = add i16 %x, 8 ; 1U << (4-1)
  %tmp1 = icmp uge i16 %tmp0, 16 ; 1U << 4
  ret i1 %tmp1
}

; Bad storage type
define i1 @add_ugecmp_bad_i24_i8(i24 %x) nounwind {
; X86-LABEL: add_ugecmp_bad_i24_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl $128, %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $16777215, %eax # imm = 0xFFFFFF
; X86-NEXT:    cmpl $255, %eax
; X86-NEXT:    seta %al
; X86-NEXT:    retl
;
; X64-LABEL: add_ugecmp_bad_i24_i8:
; X64:       # %bb.0:
; X64-NEXT:    subl $-128, %edi
; X64-NEXT:    andl $16777215, %edi # imm = 0xFFFFFF
; X64-NEXT:    cmpl $255, %edi
; X64-NEXT:    seta %al
; X64-NEXT:    retq
  %tmp0 = add i24 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i24 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Slightly more canonical variant
define i1 @add_ugtcmp_bad_i16_i8(i16 %x) nounwind {
; CHECK-LABEL: add_ugtcmp_bad_i16_i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    xorl %eax, %eax
; CHECK-NEXT:    ret{{[l|q]}}
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ugt i16 %tmp0, -1 ; when we +1 it, it will wrap to 0
  ret i1 %tmp1
}