; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-linux-gnu -x86-speculative-load-hardening -data-sections | FileCheck %s --check-prefix=X64
; RUN: llc < %s -mtriple=x86_64-unknown-linux-gnu -x86-speculative-load-hardening -data-sections -mattr=+retpoline | FileCheck %s --check-prefix=X64-RETPOLINE
;
; FIXME: Add support for 32-bit.

      7 @global_fnptr = external global i32 ()*
      8 
      9 @global_blockaddrs = constant [4 x i8*] [
     10   i8* blockaddress(@test_indirectbr_global, %bb0),
     11   i8* blockaddress(@test_indirectbr_global, %bb1),
     12   i8* blockaddress(@test_indirectbr_global, %bb2),
     13   i8* blockaddress(@test_indirectbr_global, %bb3)
     14 ]
     15 
     16 define i32 @test_indirect_call(i32 ()** %ptr) nounwind {
     17 ; X64-LABEL: test_indirect_call:
     18 ; X64:       # %bb.0: # %entry
     19 ; X64-NEXT:    pushq %rax
     20 ; X64-NEXT:    movq %rsp, %rax
     21 ; X64-NEXT:    movq $-1, %rcx
     22 ; X64-NEXT:    sarq $63, %rax
     23 ; X64-NEXT:    movq (%rdi), %rcx
     24 ; X64-NEXT:    orq %rax, %rcx
     25 ; X64-NEXT:    shlq $47, %rax
     26 ; X64-NEXT:    orq %rax, %rsp
     27 ; X64-NEXT:    callq *%rcx
     28 ; X64-NEXT:    movq %rsp, %rcx
     29 ; X64-NEXT:    sarq $63, %rcx
     30 ; X64-NEXT:    shlq $47, %rcx
     31 ; X64-NEXT:    orq %rcx, %rsp
     32 ; X64-NEXT:    popq %rcx
     33 ; X64-NEXT:    retq
     34 ;
     35 ; X64-RETPOLINE-LABEL: test_indirect_call:
     36 ; X64-RETPOLINE:       # %bb.0: # %entry
     37 ; X64-RETPOLINE-NEXT:    pushq %rax
     38 ; X64-RETPOLINE-NEXT:    movq %rsp, %rax
     39 ; X64-RETPOLINE-NEXT:    movq $-1, %rcx
     40 ; X64-RETPOLINE-NEXT:    sarq $63, %rax
     41 ; X64-RETPOLINE-NEXT:    movq (%rdi), %r11
     42 ; X64-RETPOLINE-NEXT:    orq %rax, %r11
     43 ; X64-RETPOLINE-NEXT:    shlq $47, %rax
     44 ; X64-RETPOLINE-NEXT:    orq %rax, %rsp
     45 ; X64-RETPOLINE-NEXT:    callq __llvm_retpoline_r11
     46 ; X64-RETPOLINE-NEXT:    movq %rsp, %rcx
     47 ; X64-RETPOLINE-NEXT:    sarq $63, %rcx
     48 ; X64-RETPOLINE-NEXT:    shlq $47, %rcx
     49 ; X64-RETPOLINE-NEXT:    orq %rcx, %rsp
     50 ; X64-RETPOLINE-NEXT:    popq %rcx
     51 ; X64-RETPOLINE-NEXT:    retq
     52 entry:
     53   %fp = load i32 ()*, i32 ()** %ptr
     54   %v = call i32 %fp()
     55   ret i32 %v
     56 }
     57 
     58 define i32 @test_indirect_tail_call(i32 ()** %ptr) nounwind {
     59 ; X64-LABEL: test_indirect_tail_call:
     60 ; X64:       # %bb.0: # %entry
     61 ; X64-NEXT:    movq %rsp, %rax
     62 ; X64-NEXT:    movq $-1, %rcx
     63 ; X64-NEXT:    sarq $63, %rax
     64 ; X64-NEXT:    movq (%rdi), %rcx
     65 ; X64-NEXT:    orq %rax, %rcx
     66 ; X64-NEXT:    shlq $47, %rax
     67 ; X64-NEXT:    orq %rax, %rsp
     68 ; X64-NEXT:    jmpq *%rcx # TAILCALL
     69 ;
     70 ; X64-RETPOLINE-LABEL: test_indirect_tail_call:
     71 ; X64-RETPOLINE:       # %bb.0: # %entry
     72 ; X64-RETPOLINE-NEXT:    movq %rsp, %rax
     73 ; X64-RETPOLINE-NEXT:    movq $-1, %rcx
     74 ; X64-RETPOLINE-NEXT:    sarq $63, %rax
     75 ; X64-RETPOLINE-NEXT:    movq (%rdi), %r11
     76 ; X64-RETPOLINE-NEXT:    orq %rax, %r11
     77 ; X64-RETPOLINE-NEXT:    shlq $47, %rax
     78 ; X64-RETPOLINE-NEXT:    orq %rax, %rsp
     79 ; X64-RETPOLINE-NEXT:    jmp __llvm_retpoline_r11 # TAILCALL
     80 entry:
     81   %fp = load i32 ()*, i32 ()** %ptr
     82   %v = tail call i32 %fp()
     83   ret i32 %v
     84 }
     85 
     86 define i32 @test_indirect_call_global() nounwind {
     87 ; X64-LABEL: test_indirect_call_global:
     88 ; X64:       # %bb.0: # %entry
     89 ; X64-NEXT:    pushq %rax
     90 ; X64-NEXT:    movq %rsp, %rax
     91 ; X64-NEXT:    movq $-1, %rcx
     92 ; X64-NEXT:    sarq $63, %rax
     93 ; X64-NEXT:    movq {{.*}}(%rip), %rcx
     94 ; X64-NEXT:    orq %rax, %rcx
     95 ; X64-NEXT:    shlq $47, %rax
     96 ; X64-NEXT:    orq %rax, %rsp
     97 ; X64-NEXT:    callq *%rcx
     98 ; X64-NEXT:    movq %rsp, %rcx
     99 ; X64-NEXT:    sarq $63, %rcx
    100 ; X64-NEXT:    shlq $47, %rcx
    101 ; X64-NEXT:    orq %rcx, %rsp
    102 ; X64-NEXT:    popq %rcx
    103 ; X64-NEXT:    retq
    104 ;
    105 ; X64-RETPOLINE-LABEL: test_indirect_call_global:
    106 ; X64-RETPOLINE:       # %bb.0: # %entry
    107 ; X64-RETPOLINE-NEXT:    pushq %rax
    108 ; X64-RETPOLINE-NEXT:    movq %rsp, %rax
    109 ; X64-RETPOLINE-NEXT:    movq $-1, %rcx
    110 ; X64-RETPOLINE-NEXT:    sarq $63, %rax
    111 ; X64-RETPOLINE-NEXT:    movq {{.*}}(%rip), %r11
    112 ; X64-RETPOLINE-NEXT:    shlq $47, %rax
    113 ; X64-RETPOLINE-NEXT:    orq %rax, %rsp
    114 ; X64-RETPOLINE-NEXT:    callq __llvm_retpoline_r11
    115 ; X64-RETPOLINE-NEXT:    movq %rsp, %rcx
    116 ; X64-RETPOLINE-NEXT:    sarq $63, %rcx
    117 ; X64-RETPOLINE-NEXT:    shlq $47, %rcx
    118 ; X64-RETPOLINE-NEXT:    orq %rcx, %rsp
    119 ; X64-RETPOLINE-NEXT:    popq %rcx
    120 ; X64-RETPOLINE-NEXT:    retq
    121 entry:
    122   %fp = load i32 ()*, i32 ()** @global_fnptr
    123   %v = call i32 %fp()
    124   ret i32 %v
    125 }
    126 
    127 define i32 @test_indirect_tail_call_global() nounwind {
    128 ; X64-LABEL: test_indirect_tail_call_global:
    129 ; X64:       # %bb.0: # %entry
    130 ; X64-NEXT:    movq %rsp, %rax
    131 ; X64-NEXT:    movq $-1, %rcx
    132 ; X64-NEXT:    sarq $63, %rax
    133 ; X64-NEXT:    movq {{.*}}(%rip), %rcx
    134 ; X64-NEXT:    orq %rax, %rcx
    135 ; X64-NEXT:    shlq $47, %rax
    136 ; X64-NEXT:    orq %rax, %rsp
    137 ; X64-NEXT:    jmpq *%rcx # TAILCALL
    138 ;
    139 ; X64-RETPOLINE-LABEL: test_indirect_tail_call_global:
    140 ; X64-RETPOLINE:       # %bb.0: # %entry
    141 ; X64-RETPOLINE-NEXT:    movq %rsp, %rax
    142 ; X64-RETPOLINE-NEXT:    movq $-1, %rcx
    143 ; X64-RETPOLINE-NEXT:    sarq $63, %rax
    144 ; X64-RETPOLINE-NEXT:    movq {{.*}}(%rip), %r11
    145 ; X64-RETPOLINE-NEXT:    shlq $47, %rax
    146 ; X64-RETPOLINE-NEXT:    orq %rax, %rsp
    147 ; X64-RETPOLINE-NEXT:    jmp __llvm_retpoline_r11 # TAILCALL
    148 entry:
    149   %fp = load i32 ()*, i32 ()** @global_fnptr
    150   %v = tail call i32 %fp()
    151   ret i32 %v
    152 }
    153 
    154 define i32 @test_indirectbr(i8** %ptr) nounwind {
    155 ; X64-LABEL: test_indirectbr:
    156 ; X64:       # %bb.0: # %entry
    157 ; X64-NEXT:    movq %rsp, %rcx
    158 ; X64-NEXT:    movq $-1, %rax
    159 ; X64-NEXT:    sarq $63, %rcx
    160 ; X64-NEXT:    movq (%rdi), %rax
    161 ; X64-NEXT:    orq %rcx, %rax
    162 ; X64-NEXT:    jmpq *%rax
    163 ; X64-NEXT:  .LBB4_1: # %bb0
    164 ; X64-NEXT:    movl $2, %eax
    165 ; X64-NEXT:    jmp .LBB4_2
    166 ; X64-NEXT:  .LBB4_4: # %bb2
    167 ; X64-NEXT:    movl $13, %eax
    168 ; X64-NEXT:    jmp .LBB4_2
    169 ; X64-NEXT:  .LBB4_5: # %bb3
    170 ; X64-NEXT:    movl $42, %eax
    171 ; X64-NEXT:    jmp .LBB4_2
    172 ; X64-NEXT:  .LBB4_3: # %bb1
    173 ; X64-NEXT:    movl $7, %eax
    174 ; X64-NEXT:  .LBB4_2: # %bb0
    175 ; X64-NEXT:    shlq $47, %rcx
    176 ; X64-NEXT:    orq %rcx, %rsp
    177 ; X64-NEXT:    retq
    178 ;
    179 ; X64-RETPOLINE-LABEL: test_indirectbr:
    180 ; X64-RETPOLINE:       # %bb.0: # %entry
    181 entry:
    182   %a = load i8*, i8** %ptr
    183   indirectbr i8* %a, [ label %bb0, label %bb1, label %bb2, label %bb3 ]
    184 
    185 bb0:
    186   ret i32 2
    187 
    188 bb1:
    189   ret i32 7
    190 
    191 bb2:
    192   ret i32 13
    193 
    194 bb3:
    195   ret i32 42
    196 }
    197 
    198 define i32 @test_indirectbr_global(i32 %idx) nounwind {
    199 ; X64-LABEL: test_indirectbr_global:
    200 ; X64:       # %bb.0: # %entry
    201 ; X64-NEXT:    movq %rsp, %rcx
    202 ; X64-NEXT:    movq $-1, %rax
    203 ; X64-NEXT:    sarq $63, %rcx
    204 ; X64-NEXT:    movslq %edi, %rax
    205 ; X64-NEXT:    movq global_blockaddrs(,%rax,8), %rax
    206 ; X64-NEXT:    orq %rcx, %rax
    207 ; X64-NEXT:    jmpq *%rax
    208 ; X64-NEXT:  .Ltmp0: # Block address taken
    209 ; X64-NEXT:  .LBB5_1: # %bb0
    210 ; X64-NEXT:    movl $2, %eax
    211 ; X64-NEXT:    jmp .LBB5_2
    212 ; X64-NEXT:  .Ltmp1: # Block address taken
    213 ; X64-NEXT:  .LBB5_4: # %bb2
    214 ; X64-NEXT:    movl $13, %eax
    215 ; X64-NEXT:    jmp .LBB5_2
    216 ; X64-NEXT:  .Ltmp2: # Block address taken
    217 ; X64-NEXT:  .LBB5_5: # %bb3
    218 ; X64-NEXT:    movl $42, %eax
    219 ; X64-NEXT:    jmp .LBB5_2
    220 ; X64-NEXT:  .Ltmp3: # Block address taken
    221 ; X64-NEXT:  .LBB5_3: # %bb1
    222 ; X64-NEXT:    movl $7, %eax
    223 ; X64-NEXT:  .LBB5_2: # %bb0
    224 ; X64-NEXT:    shlq $47, %rcx
    225 ; X64-NEXT:    orq %rcx, %rsp
    226 ; X64-NEXT:    retq
    227 ;
    228 ; X64-RETPOLINE-LABEL: test_indirectbr_global:
    229 ; X64-RETPOLINE:       # %bb.0: # %entry
    230 ; X64-RETPOLINE-NEXT:    movq %rsp, %rcx
    231 ; X64-RETPOLINE-NEXT:    movq $-1, %rax
    232 ; X64-RETPOLINE-NEXT:    sarq $63, %rcx
    233 ; X64-RETPOLINE-NEXT:    movslq %edi, %rdx
    234 ; X64-RETPOLINE-NEXT:    movq global_blockaddrs(,%rdx,8), %rdx
    235 ; X64-RETPOLINE-NEXT:    orq %rcx, %rdx
    236 ; X64-RETPOLINE-NEXT:    cmpq $2, %rdx
    237 ; X64-RETPOLINE-NEXT:    je .LBB6_5
    238 ; X64-RETPOLINE-NEXT:  # %bb.1: # %entry
    239 ; X64-RETPOLINE-NEXT:    cmoveq %rax, %rcx
    240 ; X64-RETPOLINE-NEXT:    cmpq $3, %rdx
    241 ; X64-RETPOLINE-NEXT:    je .LBB6_6
    242 ; X64-RETPOLINE-NEXT:  # %bb.2: # %entry
    243 ; X64-RETPOLINE-NEXT:    cmoveq %rax, %rcx
    244 ; X64-RETPOLINE-NEXT:    cmpq $4, %rdx
    245 ; X64-RETPOLINE-NEXT:    jne .LBB6_3
    246 ; X64-RETPOLINE-NEXT:  .Ltmp0: # Block address taken
    247 ; X64-RETPOLINE-NEXT:  # %bb.7: # %bb3
    248 ; X64-RETPOLINE-NEXT:    cmovneq %rax, %rcx
    249 ; X64-RETPOLINE-NEXT:    movl $42, %eax
    250 ; X64-RETPOLINE-NEXT:    jmp .LBB6_4
    251 ; X64-RETPOLINE-NEXT:  .Ltmp1: # Block address taken
    252 ; X64-RETPOLINE-NEXT:  .LBB6_5: # %bb1
    253 ; X64-RETPOLINE-NEXT:    cmovneq %rax, %rcx
    254 ; X64-RETPOLINE-NEXT:    movl $7, %eax
    255 ; X64-RETPOLINE-NEXT:    jmp .LBB6_4
    256 ; X64-RETPOLINE-NEXT:  .Ltmp2: # Block address taken
    257 ; X64-RETPOLINE-NEXT:  .LBB6_6: # %bb2
    258 ; X64-RETPOLINE-NEXT:    cmovneq %rax, %rcx
    259 ; X64-RETPOLINE-NEXT:    movl $13, %eax
    260 ; X64-RETPOLINE-NEXT:    jmp .LBB6_4
    261 ; X64-RETPOLINE-NEXT:  .Ltmp3: # Block address taken
    262 ; X64-RETPOLINE-NEXT:  .LBB6_3: # %bb0
    263 ; X64-RETPOLINE-NEXT:    cmoveq %rax, %rcx
    264 ; X64-RETPOLINE-NEXT:    movl $2, %eax
    265 ; X64-RETPOLINE-NEXT:  .LBB6_4: # %bb0
    266 ; X64-RETPOLINE-NEXT:    shlq $47, %rcx
    267 ; X64-RETPOLINE-NEXT:    orq %rcx, %rsp
    268 ; X64-RETPOLINE-NEXT:    retq
    269 entry:
    270   %ptr = getelementptr [4 x i8*], [4 x i8*]* @global_blockaddrs, i32 0, i32 %idx
    271   %a = load i8*, i8** %ptr
    272   indirectbr i8* %a, [ label %bb0, label %bb1, label %bb2, label %bb3 ]
    273 
    274 bb0:
    275   ret i32 2
    276 
    277 bb1:
    278   ret i32 7
    279 
    280 bb2:
    281   ret i32 13
    282 
    283 bb3:
    284   ret i32 42
    285 }
    286 
    287 ; This function's switch is crafted to trigger jump-table lowering in the x86
    288 ; backend so that we can test how the exact jump table lowering behaves.
    289 define i32 @test_switch_jumptable(i32 %idx) nounwind {
    290 ; X64-LABEL: test_switch_jumptable:
    291 ; X64:       # %bb.0: # %entry
    292 ; X64-NEXT:    movq %rsp, %rcx
    293 ; X64-NEXT:    movq $-1, %rax
    294 ; X64-NEXT:    sarq $63, %rcx
    295 ; X64-NEXT:    cmpl $3, %edi
    296 ; X64-NEXT:    ja .LBB6_2
    297 ; X64-NEXT:  # %bb.1: # %entry
    298 ; X64-NEXT:    cmovaq %rax, %rcx
    299 ; X64-NEXT:    movl %edi, %eax
    300 ; X64-NEXT:    movq .LJTI6_0(,%rax,8), %rax
    301 ; X64-NEXT:    orq %rcx, %rax
    302 ; X64-NEXT:    jmpq *%rax
    303 ; X64-NEXT:  .LBB6_3: # %bb1
    304 ; X64-NEXT:    movl $7, %eax
    305 ; X64-NEXT:    jmp .LBB6_4
    306 ; X64-NEXT:  .LBB6_2: # %bb0
    307 ; X64-NEXT:    cmovbeq %rax, %rcx
    308 ; X64-NEXT:    movl $2, %eax
    309 ; X64-NEXT:    jmp .LBB6_4
    310 ; X64-NEXT:  .LBB6_5: # %bb2
    311 ; X64-NEXT:    movl $13, %eax
    312 ; X64-NEXT:    jmp .LBB6_4
    313 ; X64-NEXT:  .LBB6_6: # %bb3
    314 ; X64-NEXT:    movl $42, %eax
    315 ; X64-NEXT:    jmp .LBB6_4
    316 ; X64-NEXT:  .LBB6_7: # %bb5
    317 ; X64-NEXT:    movl $11, %eax
    318 ; X64-NEXT:  .LBB6_4: # %bb1
    319 ; X64-NEXT:    shlq $47, %rcx
    320 ; X64-NEXT:    orq %rcx, %rsp
    321 ; X64-NEXT:    retq
    322 ;
    323 ; X64-RETPOLINE-LABEL: test_switch_jumptable:
    324 ; X64-RETPOLINE:       # %bb.0: # %entry
    325 ; X64-RETPOLINE-NEXT:    movq %rsp, %rcx
    326 ; X64-RETPOLINE-NEXT:    movq $-1, %rax
    327 ; X64-RETPOLINE-NEXT:    sarq $63, %rcx
    328 ; X64-RETPOLINE-NEXT:    cmpl $1, %edi
    329 ; X64-RETPOLINE-NEXT:    jg .LBB7_4
    330 ; X64-RETPOLINE-NEXT:  # %bb.1: # %entry
    331 ; X64-RETPOLINE-NEXT:    cmovgq %rax, %rcx
    332 ; X64-RETPOLINE-NEXT:    testl %edi, %edi
    333 ; X64-RETPOLINE-NEXT:    je .LBB7_8
    334 ; X64-RETPOLINE-NEXT:  # %bb.2: # %entry
    335 ; X64-RETPOLINE-NEXT:    cmoveq %rax, %rcx
    336 ; X64-RETPOLINE-NEXT:    cmpl $1, %edi
    337 ; X64-RETPOLINE-NEXT:    jne .LBB7_6
    338 ; X64-RETPOLINE-NEXT:  # %bb.3: # %bb2
    339 ; X64-RETPOLINE-NEXT:    cmovneq %rax, %rcx
    340 ; X64-RETPOLINE-NEXT:    movl $13, %eax
    341 ; X64-RETPOLINE-NEXT:    jmp .LBB7_7
    342 ; X64-RETPOLINE-NEXT:  .LBB7_4: # %entry
    343 ; X64-RETPOLINE-NEXT:    cmovleq %rax, %rcx
    344 ; X64-RETPOLINE-NEXT:    cmpl $2, %edi
    345 ; X64-RETPOLINE-NEXT:    je .LBB7_9
    346 ; X64-RETPOLINE-NEXT:  # %bb.5: # %entry
    347 ; X64-RETPOLINE-NEXT:    cmoveq %rax, %rcx
    348 ; X64-RETPOLINE-NEXT:    cmpl $3, %edi
    349 ; X64-RETPOLINE-NEXT:    jne .LBB7_6
    350 ; X64-RETPOLINE-NEXT:  # %bb.10: # %bb5
    351 ; X64-RETPOLINE-NEXT:    cmovneq %rax, %rcx
    352 ; X64-RETPOLINE-NEXT:    movl $11, %eax
    353 ; X64-RETPOLINE-NEXT:    jmp .LBB7_7
    354 ; X64-RETPOLINE-NEXT:  .LBB7_6:
    355 ; X64-RETPOLINE-NEXT:    cmoveq %rax, %rcx
    356 ; X64-RETPOLINE-NEXT:    movl $2, %eax
    357 ; X64-RETPOLINE-NEXT:    jmp .LBB7_7
    358 ; X64-RETPOLINE-NEXT:  .LBB7_8: # %bb1
    359 ; X64-RETPOLINE-NEXT:    cmovneq %rax, %rcx
    360 ; X64-RETPOLINE-NEXT:    movl $7, %eax
    361 ; X64-RETPOLINE-NEXT:    jmp .LBB7_7
    362 ; X64-RETPOLINE-NEXT:  .LBB7_9: # %bb3
    363 ; X64-RETPOLINE-NEXT:    cmovneq %rax, %rcx
    364 ; X64-RETPOLINE-NEXT:    movl $42, %eax
    365 ; X64-RETPOLINE-NEXT:  .LBB7_7: # %bb0
    366 ; X64-RETPOLINE-NEXT:    shlq $47, %rcx
    367 ; X64-RETPOLINE-NEXT:    orq %rcx, %rsp
    368 ; X64-RETPOLINE-NEXT:    retq
    369 entry:
    370   switch i32 %idx, label %bb0 [
    371     i32 0, label %bb1
    372     i32 1, label %bb2
    373     i32 2, label %bb3
    374     i32 3, label %bb5
    375   ]
    376 
    377 bb0:
    378   ret i32 2
    379 
    380 bb1:
    381   ret i32 7
    382 
    383 bb2:
    384   ret i32 13
    385 
    386 bb3:
    387   ret i32 42
    388 
    389 bb5:
    390   ret i32 11
    391 }
    392