Home | History | Annotate | Download | only in sanitizer_common
      1 //===-- sanitizer_syscall_linux_aarch64.inc --------------------*- C++ -*-===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 // Implementations of internal_syscall and internal_iserror for Linux/aarch64.
     11 //
     12 //===----------------------------------------------------------------------===//
     13 
// Maps a syscall name to its kernel number, e.g. SYSCALL(read) -> __NR_read.
#define SYSCALL(name) __NR_ ## name
     15 
// Issues syscall `nr` with no arguments. Per the AArch64 Linux syscall ABI,
// the syscall number goes in x8 and the result comes back in x0.
// "memory" is clobbered because the kernel may read/write user memory on
// behalf of the caller; "cc" is listed defensively for the condition flags.
static uptr __internal_syscall(u64 nr) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0");  // output only: receives the kernel's return value
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8)
               : "memory", "cc");
  return x0;
}
// Dispatch target for a 0-argument syscall. The parenthesized callee name
// defends against any function-like macro with the same name.
#define __internal_syscall0(n) \
  (__internal_syscall)(n)
     27 
// Issues syscall `nr` with one argument: number in x8, arg1 in x0.
// x0 doubles as the return-value register; the "0"(x0) input constraint
// ties the input to output operand 0 so the compiler keeps them in x0.
static uptr __internal_syscall(u64 nr, u64 arg1) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0)
               : "memory", "cc");
  return x0;
}
// Dispatch target for a 1-argument syscall.
#define __internal_syscall1(n, a1) \
  (__internal_syscall)(n, (u64)(a1))
     39 
// Issues syscall `nr` with two arguments in x0-x1; number in x8, result in x0.
// NOTE(review): the mixed u64/long parameter types exist only to make the
// overloads distinct for dispatch; all values land in 64-bit registers.
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1)
               : "memory", "cc");
  return x0;
}
// Dispatch target for a 2-argument syscall.
#define __internal_syscall2(n, a1, a2) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2))
     52 
// Issues syscall `nr` with three arguments in x0-x2; number in x8, result
// returned in x0 (tied to the first argument via the "0" constraint).
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2)
               : "memory", "cc");
  return x0;
}
// Dispatch target for a 3-argument syscall.
#define __internal_syscall3(n, a1, a2, a3) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3))
     66 
// Issues syscall `nr` with four arguments in x0-x3; number in x8, result
// returned in x0.
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
                               u64 arg4) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  register u64 x3 asm("x3") = arg4;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3)
               : "memory", "cc");
  return x0;
}
// Dispatch target for a 4-argument syscall.
#define __internal_syscall4(n, a1, a2, a3, a4) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4))
     82 
// Issues syscall `nr` with five arguments in x0-x4; number in x8, result
// returned in x0.
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
                               u64 arg4, long arg5) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  register u64 x3 asm("x3") = arg4;
  register u64 x4 asm("x4") = arg5;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4)
               : "memory", "cc");
  return x0;
}
// Dispatch target for a 5-argument syscall.
#define __internal_syscall5(n, a1, a2, a3, a4, a5) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
                       (u64)(a5))
    100 
// Issues syscall `nr` with six arguments in x0-x5 (the AArch64 maximum);
// number in x8, result returned in x0.
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
                               u64 arg4, long arg5, long arg6) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  register u64 x3 asm("x3") = arg4;
  register u64 x4 asm("x4") = arg5;
  register u64 x5 asm("x5") = arg6;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4), "r"(x5)
               : "memory", "cc");
  return x0;
}
// Dispatch target for a 6-argument syscall.
#define __internal_syscall6(n, a1, a2, a3, a4, a5, a6) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
                       (u64)(a5), (long)(a6))
    119 
// Argument-counting dispatch: internal_syscall(nr, a1, ..., ak) expands to
// __internal_syscall<k>(nr, a1, ..., ak), where <k> is the number of
// arguments AFTER the syscall number.
//
// Selects the 9th argument; the shifted constant list below makes that the
// count of variadic arguments minus one.
#define __SYSCALL_NARGS_X(a1, a2, a3, a4, a5, a6, a7, a8, n, ...) n
#define __SYSCALL_NARGS(...) \
  __SYSCALL_NARGS_X(__VA_ARGS__, 7, 6, 5, 4, 3, 2, 1, 0, )
#define __SYSCALL_CONCAT_X(a, b) a##b
// Extra level of indirection so macro arguments are fully expanded before ##.
#define __SYSCALL_CONCAT(a, b) __SYSCALL_CONCAT_X(a, b)
// Pastes the base name with the computed arity and invokes the result.
#define __SYSCALL_DISP(b, ...) \
  __SYSCALL_CONCAT(b, __SYSCALL_NARGS(__VA_ARGS__))(__VA_ARGS__)

#define internal_syscall(...) __SYSCALL_DISP(__internal_syscall, __VA_ARGS__)
    129 
    130 // Helper function used to avoid cobbler errno.
    131 bool internal_iserror(uptr retval, int *rverrno) {
    132   if (retval >= (uptr)-4095) {
    133     if (rverrno)
    134       *rverrno = -retval;
    135     return true;
    136   }
    137   return false;
    138 }
    139