/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * AARCH64 specific definitions for NOLIBC
 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
 */
6
7 #ifndef _NOLIBC_ARCH_AARCH64_H
8 #define _NOLIBC_ARCH_AARCH64_H
9
10 #include "compiler.h"
11 #include "crt.h"
12
/* Syscalls for AARCH64 :
 * - registers are 64-bit
 * - stack is 16-byte aligned
 * - syscall number is passed in x8
 * - arguments are in x0, x1, x2, x3, x4, x5
 * - the system call is performed by calling svc 0
 * - syscall return comes in x0.
 * - the arguments are cast to long and assigned into the target registers
 *   which are then simply passed as registers to the asm code, so that we
 *   don't have to experience issues with register constraints.
 */
24
/* Invoke syscall <num> with no arguments.  Evaluates to the raw kernel
 * return value in x0 (a negative errno on failure).  x8 carries the
 * syscall number; _arg1 is output-only since there is no first argument.
 */
#define my_syscall0(num)                                                      \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0");                                   \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_num)                                                   \
		: "memory", "cc"  /* kernel may read/write memory, set flags */ \
	);                                                                    \
	_arg1;                                                                \
})
38
/* Invoke syscall <num> with one argument.  arg1 is cast to long into x0,
 * which doubles as the return-value register: it is listed both as the
 * output ("=r") and as an input so the compiler knows its prior value is
 * consumed by the svc before being overwritten.
 */
#define my_syscall1(num, arg1)                                                \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1),                                                 \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
53
/* Invoke syscall <num> with two arguments (x0, x1); returns x0. */
#define my_syscall2(num, arg1, arg2)                                          \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2),                                     \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
69
/* Invoke syscall <num> with three arguments (x0-x2); returns x0. */
#define my_syscall3(num, arg1, arg2, arg3)                                    \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3),                         \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
86
/* Invoke syscall <num> with four arguments (x0-x3); returns x0. */
#define my_syscall4(num, arg1, arg2, arg3, arg4)                              \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("x3") = (long)(arg4);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4),             \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
104
/* Invoke syscall <num> with five arguments (x0-x4); returns x0. */
#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5)                        \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("x3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("x4") = (long)(arg5);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r" (_arg1)                                                \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
123
/* Invoke syscall <num> with six arguments (x0-x5), the maximum the
 * Linux AARCH64 syscall ABI supports; returns x0.
 */
#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6)                  \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("x3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("x4") = (long)(arg5);                    \
	register long _arg6 __asm__ ("x5") = (long)(arg6);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r" (_arg1)                                                \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_arg6), "r"(_num)                                       \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})
143
/* Program entry point: captures the kernel-provided initial stack pointer
 * (which holds argc/argv/envp) into x0 as the sole argument of _start_c(),
 * realigns sp to 16 bytes as the callee requires, then branches to the C
 * runtime.  Declared weak so user code may override it, and noreturn since
 * _start_c() never comes back here.
 */
void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _start(void)
{
	__asm__ volatile (
		"mov x0, sp\n"          /* save stack pointer to x0, as arg1 of _start_c */
		"and sp, x0, -16\n"     /* sp must be 16-byte aligned in the callee */
		"bl _start_c\n"         /* transfer to c runtime */
	);
	__nolibc_entrypoint_epilogue();
}
154 #endif /* _NOLIBC_ARCH_AARCH64_H */
155