; X86 MachineScheduler regression tests (stray code-browser header removed).
      1 ; RUN: llc < %s -march=x86-64 -mcpu=core2 -x86-early-ifcvt -enable-misched \
      2 ; RUN:          -misched=shuffle -misched-bottomup -verify-machineinstrs \
      3 ; RUN:     | FileCheck %s
      4 ; RUN: llc < %s -march=x86-64 -mcpu=core2 -x86-early-ifcvt -enable-misched \
      5 ; RUN:          -misched=shuffle -misched-topdown -verify-machineinstrs \
      6 ; RUN:     | FileCheck %s --check-prefix TOPDOWN
      7 ; REQUIRES: asserts
      8 ;
      9 ; Interesting MachineScheduler cases.
     10 ;
     11 ; FIXME: There should be an assert in the coalescer that we're not rematting
     12 ; "not-quite-dead" copies, but that breaks a lot of tests <rdar://problem/11148682>.
     13 
     14 declare void @llvm.memcpy.p0i8.p0i8.i64(i8* nocapture, i8* nocapture, i64, i32, i1) nounwind
     15 
     16 ; From oggenc.
     17 ; After coalescing, we have a dead superreg (RAX) definition.
     18 ;
     19 ; CHECK: xorl %esi, %esi
     20 ; CHECK: movl $32, %ecx
     21 ; CHECK: rep;movsl
; Reduced from oggenc. After coalescing there is a dead super-register (RAX)
; definition; the memcpy below must still lower to the rep;movsl sequence
; matched by the CHECK lines above. IR is intentionally left exactly as
; reduced — do not reorder or rename values, the scheduler output depends on it.
define fastcc void @_preextrapolate_helper() nounwind uwtable ssp {
entry:
  ; Condition is deliberately undef: only the preheader path's memcpy
  ; expansion is under test.
  br i1 undef, label %for.cond.preheader, label %if.end

for.cond.preheader:                               ; preds = %entry
  ; 128-byte copy with 4-byte alignment; expected to expand to rep;movsl
  ; (32 dword iterations — CHECK: movl $32, %ecx).
  call void @llvm.memcpy.p0i8.p0i8.i64(i8* undef, i8* null, i64 128, i32 4, i1 false) nounwind
  unreachable

if.end:                                           ; preds = %entry
  ret void
}
     33 
     34 ; The machine verifier checks that EFLAGS kill flags are updated when
     35 ; the scheduler reorders cmovel instructions.
     36 ;
     37 ; CHECK: test
     38 ; CHECK: cmovel
     39 ; CHECK: cmovel
     40 ; CHECK: call
; With -x86-early-ifcvt the two phis below become a pair of cmovel
; instructions (see CHECK lines above); the machine verifier then checks
; that EFLAGS kill flags are correctly updated when the scheduler reorders
; them. Keep the IR byte-identical — the test is sensitive to its shape.
define void @foo(i32 %b) nounwind uwtable ssp {
entry:
  %tobool = icmp ne i32 %b, 0
  br i1 %tobool, label %if.then, label %if.end

if.then:                                          ; preds = %entry
  br label %if.end

if.end:                                           ; preds = %if.then, %entry
  ; Both phis read the same condition, so if-conversion produces two
  ; conditional moves that share one EFLAGS def.
  %v1 = phi i32 [1, %entry], [2, %if.then]
  %v2 = phi i32 [3, %entry], [4, %if.then]
  call void @bar(i32 %v1, i32 %v2)
  ret void
}
     55 
     56 declare void @bar(i32,i32)
     57 
     58 ; Test that the DAG builder can handle an undef vreg on ExitSU.
     59 ; CHECK: hasundef
     60 ; CHECK: call
     61 
; Supporting types for @hasundef (reduced from C++; %t6/%t7 look like
; vtable-carrying classes, but only their layout matters here).
%t0 = type { i32, i32, i8 }
%t6 = type { i32 (...)**, %t7* }
%t7 = type { i32 (...)** }

; Regression test: the scheduling-DAG builder must tolerate an undef vreg
; feeding ExitSU (the undef callee below). The CHECK lines only require
; that the function compiles and contains a call.
define void @hasundef() unnamed_addr uwtable ssp align 2 {
  %1 = alloca %t0, align 8
  ; undef conditions are intentional remnants of test reduction.
  br i1 undef, label %3, label %2

; <label>:2                                       ; preds = %0
  unreachable

; <label>:3                                       ; preds = %0
  br i1 undef, label %4, label %5

; <label>:4                                       ; preds = %3
  ; Indirect call through an undef function pointer — this is what puts an
  ; undef vreg on ExitSU's dependence edges.
  call void undef(%t6* undef, %t0* %1)
  unreachable

; <label>:5                                       ; preds = %3
  ret void
}
     83 
     84 ; Test top-down subregister liveness tracking. Self-verification
     85 ; catches any pressure set underflow.
     86 ; rdar://12797931.
     87 ;
     88 ; TOPDOWN: @testSubregTracking
     89 ; TOPDOWN: divb
     90 ; TOPDOWN: movzbl %al
     91 ; TOPDOWN: ret
; Top-down subregister liveness tracking test (rdar://12797931). The i8
; udiv/urem pair forces AL/AH subregister defs; the scheduler's
; self-verification catches any register-pressure-set underflow while
; tracking them. TOPDOWN CHECK lines expect divb followed by movzbl %al.
; Do not simplify the arithmetic — the redundant adds/zexts keep the
; pressure pattern that reproduced the original bug.
define void @testSubregTracking() nounwind uwtable ssp align 2 {
  %tmp = load i8, i8* undef, align 1
  %tmp6 = sub i8 0, %tmp
  %tmp7 = load i8, i8* undef, align 1
  ; i8 division defines both quotient (AL) and remainder (AH) subregs.
  %tmp8 = udiv i8 %tmp6, %tmp7
  %tmp9 = zext i8 %tmp8 to i64
  %tmp10 = load i8, i8* undef, align 1
  %tmp11 = zext i8 %tmp10 to i64
  %tmp12 = mul i64 %tmp11, %tmp9
  ; Same operands as the udiv above — typically folded into one div.
  %tmp13 = urem i8 %tmp6, %tmp7
  %tmp14 = zext i8 %tmp13 to i32
  %tmp15 = add nsw i32 %tmp14, 0
  %tmp16 = add i32 %tmp15, 0
  store i32 %tmp16, i32* undef, align 4
  %tmp17 = add i64 0, %tmp12
  store i64 %tmp17, i64* undef, align 8
  ret void
}
    110