; LLVM regression test: x86_intrcc (interrupt calling convention) codegen on i686.
      1 ; RUN: llc -mtriple=i686-unknown-unknown < %s | FileCheck %s
      2 ; RUN: llc -mtriple=i686-unknown-unknown -O0 < %s | FileCheck %s -check-prefix=CHECK0
      3 
      4 %struct.interrupt_frame = type { i32, i32, i32, i32, i32 }
      5 
      6 @llvm.used = appending global [4 x i8*] [i8* bitcast (void (%struct.interrupt_frame*)* @test_isr_no_ecode to i8*), i8* bitcast (void (%struct.interrupt_frame*, i32)* @test_isr_ecode to i8*), i8* bitcast (void (%struct.interrupt_frame*, i32)* @test_isr_clobbers to i8*), i8* bitcast (void (%struct.interrupt_frame*)* @test_isr_x87 to i8*)], section "llvm.metadata"
      7 
      8 ; Spills eax, putting original esp at +4.
      9 ; No stack adjustment if declared with no error code
define x86_intrcc void @test_isr_no_ecode(%struct.interrupt_frame* %frame) {
  ; Interrupt handler without an error-code parameter: only the scratch
  ; register (eax) is spilled, and the epilogue returns with iretl and no
  ; extra stack adjustment.
  ; CHECK-LABEL: test_isr_no_ecode:
  ; CHECK: pushl %eax
  ; CHECK: movl 12(%esp), %eax
  ; CHECK: popl %eax
  ; CHECK: iretl
  ; CHECK0-LABEL: test_isr_no_ecode:
  ; CHECK0: pushl %eax
  ; CHECK0: leal 4(%esp), %eax
  ; CHECK0: movl 8(%eax), %eax
  ; CHECK0: popl %eax
  ; CHECK0: iretl
  ; Load field 2 of the interrupt frame and feed it to a side-effecting
  ; inline asm so the frame access cannot be optimized away.
  %pflags = getelementptr inbounds %struct.interrupt_frame, %struct.interrupt_frame* %frame, i32 0, i32 2
  %flags = load i32, i32* %pflags, align 4
  call void asm sideeffect "", "r"(i32 %flags)
  ret void
}
     27 
     28 ; Spills eax and ecx, putting original esp at +8. Stack is adjusted up another 4 bytes
     29 ; before return, popping the error code.
define x86_intrcc void @test_isr_ecode(%struct.interrupt_frame* %frame, i32 %ecode) {
  ; Interrupt handler WITH an error-code parameter: both used scratch
  ; registers (eax, ecx) are spilled, and the epilogue must pop the
  ; error code (addl $4, %esp) before iretl.
  ; CHECK-LABEL: test_isr_ecode
  ; CHECK: pushl %ecx
  ; CHECK: pushl %eax
  ; CHECK: movl 8(%esp), %eax
  ; CHECK: movl 20(%esp), %ecx
  ; CHECK: popl %eax
  ; CHECK: popl %ecx
  ; CHECK: addl $4, %esp
  ; CHECK: iretl
  ; CHECK0-LABEL: test_isr_ecode
  ; CHECK0: pushl %ecx
  ; CHECK0: pushl %eax
  ; CHECK0: movl 8(%esp), %eax
  ; CHECK0: leal 12(%esp), %ecx
  ; CHECK0: movl 8(%ecx), %ecx
  ; CHECK0: popl %eax
  ; CHECK0: popl %ecx
  ; CHECK0: addl $4, %esp
  ; CHECK0: iretl
  ; Keep both the frame field and the error code live via inline asm;
  ; fastcall pins them to ecx/edx so the spills above are forced.
  %pflags = getelementptr inbounds %struct.interrupt_frame, %struct.interrupt_frame* %frame, i32 0, i32 2
  %flags = load i32, i32* %pflags, align 4
  call x86_fastcallcc void asm sideeffect "", "r,r"(i32 %flags, i32 %ecode)
  ret void
}
     55 
     56 ; All clobbered registers must be saved
     57 define x86_intrcc void @test_isr_clobbers(%struct.interrupt_frame* %frame, i32 %ecode) {
     58   call void asm sideeffect "", "~{eax},~{ebx},~{ebp}"()
     59   ; CHECK-LABEL: test_isr_clobbers
     60   ; CHECK-SSE-NEXT: pushl %ebp
     61   ; CHECK-SSE-NEXT: pushl %ebx
     62   ; CHECK-SSE-NEXT; pushl %eax
     63   ; CHECK-SSE-NEXT: popl %eax
     64   ; CHECK-SSE-NEXT: popl %ebx
     65   ; CHECK-SSE-NEXT: popl %ebp
     66   ; CHECK-SSE-NEXT: addl $4, %esp
     67   ; CHECK-SSE-NEXT: iretl
     68   ; CHECK0-LABEL: test_isr_clobbers
     69   ; CHECK0-SSE-NEXT: pushl %ebp
     70   ; CHECK0-SSE-NEXT: pushl %ebx
     71   ; CHECK0-SSE-NEXT; pushl %eax
     72   ; CHECK0-SSE-NEXT: popl %eax
     73   ; CHECK0-SSE-NEXT: popl %ebx
     74   ; CHECK0-SSE-NEXT: popl %ebp
     75   ; CHECK0-SSE-NEXT: addl $4, %esp
     76   ; CHECK0-SSE-NEXT: iretl
     77   ret void
     78 }
     79 
     80 @f80 = common global x86_fp80 0xK00000000000000000000, align 4
     81 
     82 ; Test that the presence of x87 does not crash the FP stackifier
define x86_intrcc void @test_isr_x87(%struct.interrupt_frame* %frame) {
  ; Regression test: an x87 load/add/store inside an interrupt handler
  ; must not crash the FP stackifier pass.
  ; CHECK-LABEL: test_isr_x87
  ; CHECK-DAG: fldt f80
  ; CHECK-DAG: fld1
  ; CHECK: faddp
  ; CHECK-NEXT: fstpt f80
  ; CHECK-NEXT: iretl
entry:
  ; f80 += 1.0 on the x87 stack (0xK3FFF8... is x86_fp80 1.0).
  %ld = load x86_fp80, x86_fp80* @f80, align 4
  %add = fadd x86_fp80 %ld, 0xK3FFF8000000000000000
  store x86_fp80 %add, x86_fp80* @f80, align 4
  ret void
}
     96