; RUN: llc -o - %s | FileCheck %s
; Check that we reserve an emergency spill slot, even if we added an extra
; CSR spill for the values used by the swiftself parameter.
; CHECK-LABEL: func:
; CHECK: str [[REG:x[0-9]+]], [sp, #8]
; CHECK: add [[REG]], sp, #248
; CHECK: str xzr, [{{\s*}}[[REG]], #32760]
; CHECK: ldr [[REG]], [sp, #8]
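; The checked sequence is the scavenger at work: a register is saved into the
; reserved slot at [sp, #8], the base address is formed with the add, the
; store uses the largest 64-bit str immediate (#32760 = 4095 * 8), and the
; saved register is reloaded afterwards.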
target triple = "arm64-apple-ios"

@ptr8 = external global i8*
@ptr64 = external global i64

define hidden swiftcc void @func(i8* swiftself %arg) #0 {
bb:
  %stack0 = alloca i8*, i32 5000, align 8
  %stack1 = alloca i8*, i32 32, align 8
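  ; %stack0 alone occupies 5000 * 8 = 40,000 bytes, so the far end of the
  ; frame lies beyond the 32,760-byte reach of a 64-bit str's scaled unsigned
  ; immediate offset.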

  %v0  = load volatile i64, i64* @ptr64, align 8
  %v1  = load volatile i64, i64* @ptr64, align 8
  %v2  = load volatile i64, i64* @ptr64, align 8
  %v3  = load volatile i64, i64* @ptr64, align 8
  %v4  = load volatile i64, i64* @ptr64, align 8
  %v5  = load volatile i64, i64* @ptr64, align 8
  %v6  = load volatile i64, i64* @ptr64, align 8
  %v7  = load volatile i64, i64* @ptr64, align 8
  %v8  = load volatile i64, i64* @ptr64, align 8
  %v9  = load volatile i64, i64* @ptr64, align 8
  %v10 = load volatile i64, i64* @ptr64, align 8
  %v11 = load volatile i64, i64* @ptr64, align 8
  %v12 = load volatile i64, i64* @ptr64, align 8
  %v13 = load volatile i64, i64* @ptr64, align 8
  %v14 = load volatile i64, i64* @ptr64, align 8
  %v15 = load volatile i64, i64* @ptr64, align 8
  %v16 = load volatile i64, i64* @ptr64, align 8
  %v17 = load volatile i64, i64* @ptr64, align 8
  %v18 = load volatile i64, i64* @ptr64, align 8
  %v19 = load volatile i64, i64* @ptr64, align 8
  %v20 = load volatile i64, i64* @ptr64, align 8
  %v21 = load volatile i64, i64* @ptr64, align 8
  %v22 = load volatile i64, i64* @ptr64, align 8
  %v23 = load volatile i64, i64* @ptr64, align 8
  %v24 = load volatile i64, i64* @ptr64, align 8
  %v25 = load volatile i64, i64* @ptr64, align 8
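  ; All 26 values stay live until the matching stores below, which exhausts
  ; the allocatable registers (forcing the CSR spills) and leaves no free
  ; scratch register for the out-of-range store address.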

  ; This should exceed the stack-relative addressing limit and need an
  ; emergency spill slot.
  %s = getelementptr inbounds i8*, i8** %stack0, i64 4092
  store volatile i8* null, i8** %s
  store volatile i8* null, i8** %stack1
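  ; Index 4092 is byte offset 4092 * 8 = 32,736 into %stack0; with the rest
  ; of the frame this becomes sp + 33,008 (the checked add #248 + str #32760),
  ; out of reach of a single scaled 12-bit immediate.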

  store volatile i64 %v0,  i64* @ptr64, align 8
  store volatile i64 %v1,  i64* @ptr64, align 8
  store volatile i64 %v2,  i64* @ptr64, align 8
  store volatile i64 %v3,  i64* @ptr64, align 8
  store volatile i64 %v4,  i64* @ptr64, align 8
  store volatile i64 %v5,  i64* @ptr64, align 8
  store volatile i64 %v6,  i64* @ptr64, align 8
  store volatile i64 %v7,  i64* @ptr64, align 8
  store volatile i64 %v8,  i64* @ptr64, align 8
  store volatile i64 %v9,  i64* @ptr64, align 8
  store volatile i64 %v10, i64* @ptr64, align 8
  store volatile i64 %v11, i64* @ptr64, align 8
  store volatile i64 %v12, i64* @ptr64, align 8
  store volatile i64 %v13, i64* @ptr64, align 8
  store volatile i64 %v14, i64* @ptr64, align 8
  store volatile i64 %v15, i64* @ptr64, align 8
  store volatile i64 %v16, i64* @ptr64, align 8
  store volatile i64 %v17, i64* @ptr64, align 8
  store volatile i64 %v18, i64* @ptr64, align 8
  store volatile i64 %v19, i64* @ptr64, align 8
  store volatile i64 %v20, i64* @ptr64, align 8
  store volatile i64 %v21, i64* @ptr64, align 8
  store volatile i64 %v22, i64* @ptr64, align 8
  store volatile i64 %v23, i64* @ptr64, align 8
  store volatile i64 %v24, i64* @ptr64, align 8
  store volatile i64 %v25, i64* @ptr64, align 8
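  ; Writing every value back keeps all 26 live ranges open across the
  ; out-of-range store above.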

  ; Use the swiftself parameter late so it stays alive throughout the function.
  store volatile i8* %arg, i8** @ptr8
  ret void
}
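
; On AArch64, swiftcc passes the swiftself argument in x20; keeping %arg alive
; until the end of the function is what adds the extra callee-saved register
; spill mentioned in the header comment.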