; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=SI %s
; RUN: llc -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=VI %s

; Checks that scalar compares against immediates that fit in 16 bits
; select the compact s_cmpk_{eq,lg,gt,ge,lt,le}_{i32,u32} forms, while
; out-of-range immediates fall back to s_cmp_* with a 32-bit literal.

; Since this intrinsic is exposed as a constant after isel, use it to
; defeat the DAG's compare with constant canonicalizations.
declare i32 @llvm.amdgcn.groupstaticsize() #1

@lds = addrspace(3) global [512 x i32] undef, align 4

; GCN-LABEL: {{^}}br_scc_eq_i32_inline_imm:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 4{{$}}
define amdgpu_kernel void @br_scc_eq_i32_inline_imm(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x7fff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 32767
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max_p1:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i32_simm16_max_p1:
; GCN: s_cmpk_lg_u32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_ne_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_min(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, -32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min_m1:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 0xffff7fff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_min_m1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, -32769
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_uimm15_max:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm15_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65535
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65535
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max_p1:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 0x10000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65536
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}


; GCN-LABEL: {{^}}br_scc_eq_i32:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_eq_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i32:
; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_ne_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sgt_i32:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_sgt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x7fff{{$}}
define amdgpu_kernel void @br_scc_sgt_i32_simm16_max(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 32767
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max_p1:
; GCN: s_cmp_gt_i32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_sgt_i32_simm16_max_p1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sge_i32:
; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_sge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sge i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_slt_i32:
; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_slt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp slt i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sle_i32:
; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_sle_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sle i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ugt_i32:
; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_ugt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ugt i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_uge_i32:
; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_uge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp uge i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32:
; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_ult_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16:
; GCN: s_cmp_lt_u32 s2, 0xffff8000
define amdgpu_kernel void @br_scc_ult_i32_min_simm16(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, -32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16_m1:
; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xffff7fff{{$}}
define amdgpu_kernel void @br_scc_ult_i32_min_simm16_m1(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, -32769
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ule_i32:
; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_ule_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ule i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_eq_i32:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_eq_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp eq i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ne_i32:
; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ne_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ne i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_sgt_i32:
; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sgt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sgt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_sge_i32:
; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sge i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_slt_i32:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_slt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp slt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_sle_i32:
; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sle_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sle i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ugt_i32:
; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ugt_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ugt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_uge_i32:
; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_uge_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp uge i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ult_i32:
; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ult_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ult i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ule_i32:
; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ule_i32(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ule i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32_non_u16:
; GCN: s_cmp_lt_u32 s2, 0xfffff7ff
define amdgpu_kernel void @br_scc_ult_i32_non_u16(i32 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %not.size = xor i32 %size, -1
  %cmp0 = icmp ult i32 %cond, %not.size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"([512 x i32] addrspace(3)* @lds)
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i64_inline_imm:
; VI: s_cmp_eq_u64 s{{\[[0-9]+:[0-9]+\]}}, 4

; SI: v_cmp_eq_u64_e64
define amdgpu_kernel void @br_scc_eq_i64_inline_imm(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i64 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i64_simm16:
; VI-DAG: s_movk_i32 s[[K_LO:[0-9]+]], 0x4d2
; VI-DAG: s_mov_b32 s[[K_HI:[0-9]+]], 0
; VI: s_cmp_eq_u64 s{{\[[0-9]+:[0-9]+\]}}, s{{\[}}[[K_LO]]:[[K_HI]]{{\]}}

; SI: v_cmp_eq_u64_e32
define amdgpu_kernel void @br_scc_eq_i64_simm16(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp eq i64 %cond, 1234
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i64_inline_imm:
; VI: s_cmp_lg_u64 s{{\[[0-9]+:[0-9]+\]}}, 4

; SI: v_cmp_ne_u64_e64
define amdgpu_kernel void @br_scc_ne_i64_inline_imm(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i64 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i64_simm16:
; VI-DAG: s_movk_i32 s[[K_LO:[0-9]+]], 0x4d2
; VI-DAG: s_mov_b32 s[[K_HI:[0-9]+]], 0
; VI: s_cmp_lg_u64 s{{\[[0-9]+:[0-9]+\]}}, s{{\[}}[[K_LO]]:[[K_HI]]{{\]}}

; SI: v_cmp_ne_u64_e32
define amdgpu_kernel void @br_scc_ne_i64_simm16(i64 %cond, i32 addrspace(1)* %out) #0 {
entry:
  %cmp0 = icmp ne i64 %cond, 1234
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, i32 addrspace(1)* %out
  ret void
}

attributes #0 = { nounwind }
attributes #1 = { nounwind readnone }