; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple x86_64-apple-macosx10.13.0 < %s | FileCheck %s --check-prefix=X86_64
; RUN: llc -mtriple i386-apple-macosx10.13.0 < %s | FileCheck %s --check-prefix=X86

; The macOS triples are used to get trapping behavior on the "unreachable" IR
; instruction, so that the placement of the ud2 instruction can be verified.

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; The IR was created using the following C code:
;; typedef void *jmp_buf;
;; jmp_buf buf;
;;
;; __attribute__((noinline)) int bar(int i) {
;;   int j = i - 111;
;;   __builtin_longjmp(&buf, 1);
;;   return j;
;; }
;;
;; int foo(int i) {
;;   int j = i * 11;
;;   if (!__builtin_setjmp(&buf)) {
;;     j += 33 + bar(j);
;;   }
;;   return j + i;
;; }
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

@buf = common local_unnamed_addr global i8* null, align 8

; Functions that use LongJmp should fix the Shadow Stack using the previously
; saved ShadowStackPointer in the input buffer.
; The fix requires unwinding the shadow stack to the last saved SSP.
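;
; A rough C-like sketch of the unwind sequence the checks below verify,
; treating the buffer as an array of 8-byte slots; rdssp/incssp are
; stand-ins for the rdsspq/incsspq instructions (4-byte slots and
; rdsspd/incsspd on i386):
;;   size_t ssp = rdssp();                    // 0: shadow stacks inactive
;;   if (ssp != 0 && buf[3] > ssp) {
;;     size_t entries = (buf[3] - ssp) >> 3;  // 8-byte entries to pop
;;     incssp(entries & 0xff);                // incssp uses only low 8 bits
;;     size_t chunks = (entries >> 8) << 1;   // rest in 128-entry steps
;;     while (chunks--)
;;       incssp(128);
;;   }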
define i32 @bar(i32 %i) local_unnamed_addr {
; X86_64-LABEL: bar:
; X86_64:       ## %bb.0: ## %entry
; X86_64-NEXT:    pushq %rbp
; X86_64-NEXT:    .cfi_def_cfa_offset 16
; X86_64-NEXT:    .cfi_offset %rbp, -16
; X86_64-NEXT:    movq _buf@{{.*}}(%rip), %rax
; X86_64-NEXT:    movq (%rax), %rax
; X86_64-NEXT:    xorq %rdx, %rdx
; X86_64-NEXT:    rdsspq %rdx
; X86_64-NEXT:    testq %rdx, %rdx
; X86_64-NEXT:    je LBB0_5
; X86_64-NEXT:  ## %bb.1: ## %entry
; X86_64-NEXT:    movq 24(%rax), %rcx
; X86_64-NEXT:    subq %rdx, %rcx
; X86_64-NEXT:    jbe LBB0_5
; X86_64-NEXT:  ## %bb.2: ## %entry
; X86_64-NEXT:    shrq $3, %rcx
; X86_64-NEXT:    incsspq %rcx
; X86_64-NEXT:    shrq $8, %rcx
; X86_64-NEXT:    je LBB0_5
; X86_64-NEXT:  ## %bb.3: ## %entry
; X86_64-NEXT:    shlq %rcx
; X86_64-NEXT:    movq $128, %rdx
; X86_64-NEXT:  LBB0_4: ## %entry
; X86_64-NEXT:    ## =>This Inner Loop Header: Depth=1
; X86_64-NEXT:    incsspq %rdx
; X86_64-NEXT:    decq %rcx
; X86_64-NEXT:    jne LBB0_4
; X86_64-NEXT:  LBB0_5: ## %entry
; X86_64-NEXT:    movq (%rax), %rbp
; X86_64-NEXT:    movq 8(%rax), %rcx
; X86_64-NEXT:    movq 16(%rax), %rsp
; X86_64-NEXT:    jmpq *%rcx
;
; X86-LABEL: bar:
; X86:       ## %bb.0: ## %entry
; X86-NEXT:    pushl %ebp
; X86-NEXT:    .cfi_def_cfa_offset 8
; X86-NEXT:    .cfi_offset %ebp, -8
; X86-NEXT:    movl L_buf$non_lazy_ptr, %eax
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    rdsspd %edx
; X86-NEXT:    testl %edx, %edx
; X86-NEXT:    je LBB0_5
; X86-NEXT:  ## %bb.1: ## %entry
; X86-NEXT:    movl 12(%eax), %ecx
; X86-NEXT:    subl %edx, %ecx
; X86-NEXT:    jbe LBB0_5
; X86-NEXT:  ## %bb.2: ## %entry
; X86-NEXT:    shrl $2, %ecx
; X86-NEXT:    incsspd %ecx
; X86-NEXT:    shrl $8, %ecx
; X86-NEXT:    je LBB0_5
; X86-NEXT:  ## %bb.3: ## %entry
; X86-NEXT:    shll %ecx
; X86-NEXT:    movl $128, %edx
; X86-NEXT:  LBB0_4: ## %entry
; X86-NEXT:    ## =>This Inner Loop Header: Depth=1
; X86-NEXT:    incsspd %edx
; X86-NEXT:    decl %ecx
; X86-NEXT:    jne LBB0_4
; X86-NEXT:  LBB0_5: ## %entry
; X86-NEXT:    movl (%eax), %ebp
; X86-NEXT:    movl 4(%eax), %ecx
; X86-NEXT:    movl 8(%eax), %esp
; X86-NEXT:    jmpl *%ecx
entry:
  %0 = load i8*, i8** @buf, align 8
  tail call void @llvm.eh.sjlj.longjmp(i8* %0)
  unreachable
}

declare void @llvm.eh.sjlj.longjmp(i8*)

; Functions that call SetJmp should save the current ShadowStackPointer so
; that a later LongJmp can fix the Shadow Stack.
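;
; The checks below show setjmp filling the buffer; on x86-64 the slots are
; (on i386 the backend's slots are 4 bytes wide, so the SSP lands at 12):
;;   buf +  0: frame pointer (%rbp)
;;   buf +  8: resume address (label LBB1_4)
;;   buf + 16: stack pointer (%rsp)
;;   buf + 24: shadow stack pointer, read with rdsspq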
define i32 @foo(i32 %i) local_unnamed_addr {
; X86_64-LABEL: foo:
; X86_64:       ## %bb.0: ## %entry
; X86_64-NEXT:    pushq %rbp
; X86_64-NEXT:    .cfi_def_cfa_offset 16
; X86_64-NEXT:    .cfi_offset %rbp, -16
; X86_64-NEXT:    movq %rsp, %rbp
; X86_64-NEXT:    .cfi_def_cfa_register %rbp
; X86_64-NEXT:    pushq %r15
; X86_64-NEXT:    pushq %r14
; X86_64-NEXT:    pushq %r13
; X86_64-NEXT:    pushq %r12
; X86_64-NEXT:    pushq %rbx
; X86_64-NEXT:    pushq %rax
; X86_64-NEXT:    .cfi_offset %rbx, -56
; X86_64-NEXT:    .cfi_offset %r12, -48
; X86_64-NEXT:    .cfi_offset %r13, -40
; X86_64-NEXT:    .cfi_offset %r14, -32
; X86_64-NEXT:    .cfi_offset %r15, -24
; X86_64-NEXT:    ## kill: def $edi killed $edi def $rdi
; X86_64-NEXT:    movq %rdi, {{[-0-9]+}}(%r{{[sb]}}p) ## 8-byte Spill
; X86_64-NEXT:    movq _buf@{{.*}}(%rip), %rax
; X86_64-NEXT:    movq (%rax), %rax
; X86_64-NEXT:    movq %rbp, (%rax)
; X86_64-NEXT:    movq %rsp, 16(%rax)
; X86_64-NEXT:    leaq {{.*}}(%rip), %rcx
; X86_64-NEXT:    movq %rcx, 8(%rax)
; X86_64-NEXT:    xorq %rcx, %rcx
; X86_64-NEXT:    rdsspq %rcx
; X86_64-NEXT:    movq %rcx, 24(%rax)
; X86_64-NEXT:    #EH_SjLj_Setup LBB1_4
; X86_64-NEXT:  ## %bb.1: ## %entry
; X86_64-NEXT:    xorl %eax, %eax
; X86_64-NEXT:    testl %eax, %eax
; X86_64-NEXT:    jne LBB1_3
; X86_64-NEXT:    jmp LBB1_5
; X86_64-NEXT:  LBB1_4: ## Block address taken
; X86_64-NEXT:    ## %entry
; X86_64-NEXT:    movl $1, %eax
; X86_64-NEXT:    testl %eax, %eax
; X86_64-NEXT:    je LBB1_5
; X86_64-NEXT:  LBB1_3: ## %if.end
; X86_64-NEXT:    movq {{[-0-9]+}}(%r{{[sb]}}p), %rax ## 8-byte Reload
; X86_64-NEXT:    shll $2, %eax
; X86_64-NEXT:    leal (%rax,%rax,2), %eax
; X86_64-NEXT:    addq $8, %rsp
; X86_64-NEXT:    popq %rbx
; X86_64-NEXT:    popq %r12
; X86_64-NEXT:    popq %r13
; X86_64-NEXT:    popq %r14
; X86_64-NEXT:    popq %r15
; X86_64-NEXT:    popq %rbp
; X86_64-NEXT:    retq
; X86_64-NEXT:  LBB1_5: ## %if.then
; X86_64-NEXT:    callq _bar
; X86_64-NEXT:    ud2
;
; X86-LABEL: foo:
; X86:       ## %bb.0: ## %entry
; X86-NEXT:    pushl %ebp
; X86-NEXT:    .cfi_def_cfa_offset 8
; X86-NEXT:    .cfi_offset %ebp, -8
; X86-NEXT:    movl %esp, %ebp
; X86-NEXT:    .cfi_def_cfa_register %ebp
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    subl $12, %esp
; X86-NEXT:    .cfi_offset %esi, -20
; X86-NEXT:    .cfi_offset %edi, -16
; X86-NEXT:    .cfi_offset %ebx, -12
; X86-NEXT:    movl L_buf$non_lazy_ptr, %eax
; X86-NEXT:    movl (%eax), %eax
; X86-NEXT:    movl %ebp, (%eax)
; X86-NEXT:    movl %esp, 16(%eax)
; X86-NEXT:    movl $LBB1_4, 4(%eax)
; X86-NEXT:    xorl %ecx, %ecx
; X86-NEXT:    rdsspd %ecx
; X86-NEXT:    movl %ecx, 12(%eax)
; X86-NEXT:    #EH_SjLj_Setup LBB1_4
; X86-NEXT:  ## %bb.1: ## %entry
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    testl %eax, %eax
; X86-NEXT:    jne LBB1_3
; X86-NEXT:    jmp LBB1_5
; X86-NEXT:  LBB1_4: ## Block address taken
; X86-NEXT:    ## %entry
; X86-NEXT:    movl $1, %eax
; X86-NEXT:    testl %eax, %eax
; X86-NEXT:    je LBB1_5
; X86-NEXT:  LBB1_3: ## %if.end
; X86-NEXT:    movl 8(%ebp), %eax
; X86-NEXT:    shll $2, %eax
; X86-NEXT:    leal (%eax,%eax,2), %eax
; X86-NEXT:    addl $12, %esp
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    popl %ebx
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
; X86-NEXT:  LBB1_5: ## %if.then
; X86-NEXT:    calll _bar
; X86-NEXT:    ud2
entry:
  %0 = load i8*, i8** @buf, align 8
  %1 = bitcast i8* %0 to i8**
  %2 = tail call i8* @llvm.frameaddress(i32 0)
  store i8* %2, i8** %1, align 8
  %3 = tail call i8* @llvm.stacksave()
  %4 = getelementptr inbounds i8, i8* %0, i64 16
  %5 = bitcast i8* %4 to i8**
  store i8* %3, i8** %5, align 8
  %6 = tail call i32 @llvm.eh.sjlj.setjmp(i8* %0)
  %tobool = icmp eq i32 %6, 0
  br i1 %tobool, label %if.then, label %if.end

if.then:                                          ; preds = %entry
  %call = tail call i32 @bar(i32 undef)
  unreachable

if.end:                                           ; preds = %entry
  %add2 = mul nsw i32 %i, 12
  ret i32 %add2
}

declare i8* @llvm.frameaddress(i32)
declare i8* @llvm.stacksave()
declare i32 @llvm.eh.sjlj.setjmp(i8*)

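; The "cf-protection-return" module flag below is what requests shadow-stack
; protection and thus the setjmp/longjmp shadow stack fixes tested above.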
!llvm.module.flags = !{!0}

!0 = !{i32 4, !"cf-protection-return", i32 1}