/*
 * Copyright 2019-2022 Haiku, Inc. All Rights Reserved.
 * Distributed under the terms of the MIT License.
 */
#include <arch/arm/arch_cpu.h>
#include <asm_defs.h>
#include "asm_offsets.h"
#include "syscall_numbers.h"

.text

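// invoke the exit-thread syscall directly; the SVC immediate packs the
// syscall number in the upper bits with what appears to be the argument
// count (here 1) in the low 5 bits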
FUNCTION(_thread_exit_syscall):
	svc #((SYSCALL_EXIT_THREAD << 5) | 1)
FUNCTION_END(_thread_exit_syscall)

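// Swap sp and \xt without needing a scratch register, using the add/sub
// analogue of the XOR-swap trick: after the three instructions below,
// sp holds the old \xt and \xt holds the old sp.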
.macro xchg_sp xt
add	sp, sp, \xt
sub	\xt, sp, \xt
sub	sp, sp, \xt
.endm

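// Build an iframe on the stack and save the interrupted context into it.
// \el selects whether the exception was taken from EL0 or EL1.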
.macro EXCEPTION_ENTRY el
	// interrupts are automatically disabled by hardware

	// avoid using sp in case it is misaligned
	// swap sp with x19 and use it instead
	xchg_sp x19

	// x19 is now the stack top, make room for IFRAME
	sub x19, x19, #(IFRAME_sizeof)

	stp	    x0,  x1, [x19, #(IFRAME_x + 0 * 8)]
	stp	    x2,  x3, [x19, #(IFRAME_x + 2 * 8)]
	stp	    x4,  x5, [x19, #(IFRAME_x + 4 * 8)]
	stp	    x6,  x7, [x19, #(IFRAME_x + 6 * 8)]
	stp	    x8,  x9, [x19, #(IFRAME_x + 8 * 8)]
	stp	   x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	stp	   x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	stp	   x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	stp	   x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	mov    x0,   sp  // original x19 that we swapped with sp
	stp	   x18,  x0, [x19, #(IFRAME_x + 18 * 8)]

	// x20-x28 are callee-saved, so the C handler will preserve them;
	// no need to store them here

	str	   x29,      [x19, #(IFRAME_fp)]
	str	   x30,      [x19, #(IFRAME_lr)]

.if \el == 0
	mrs x0, SP_EL0
.else
	// add the iframe size back to recover the original sp
	add x0, x19, #(IFRAME_sizeof)
.endif

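	// capture the exception state: return address (ELR), saved PSTATE
	// (SPSR), exception syndrome (ESR) and faulting address (FAR)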
	mrs x1, ELR_EL1
	mrs x2, SPSR_EL1
	mrs x3, ESR_EL1
	mrs x4, FAR_EL1

	str x0, [x19, #(IFRAME_sp)]
	str x1, [x19, #(IFRAME_elr)]
	str x2, [x19, #(IFRAME_spsr)]
	str x3, [x19, #(IFRAME_esr)]
	str x4, [x19, #(IFRAME_far)]
.endm

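// Restore the interrupted context from the iframe in x19 and eret.
// \el selects whether we return to EL0 or EL1.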
.macro EXCEPTION_RETURN el
	// x19 is callee-saved so it still points to IFRAME
	// x0, x1, x18, x19 will be restored at the very end

	ldr x0,  [x19, #(IFRAME_elr)]
	ldr x1,  [x19, #(IFRAME_spsr)]
	ldr x18, [x19, #(IFRAME_sp)]

	// x0 and x1 will be restored later
	ldp	    x2,  x3, [x19, #(IFRAME_x + 2 * 8)]
	ldp	    x4,  x5, [x19, #(IFRAME_x + 4 * 8)]
	ldp	    x6,  x7, [x19, #(IFRAME_x + 6 * 8)]
	ldp	    x8,  x9, [x19, #(IFRAME_x + 8 * 8)]
	ldp	   x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	ldp	   x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	ldp	   x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	ldp	   x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	// x18 and x19 will be restored later
	ldr	   x29, [x19, #(IFRAME_fp)]
	ldr	   x30, [x19, #(IFRAME_lr)]

	// disable interrupts before restoring ELR/SPSR/sp
	msr DAIFSet, #0xf

	msr ELR_EL1, x0
	msr SPSR_EL1, x1

.if \el == 0
	// load stack pointer for EL0 from IFRAME
	msr SP_EL0, x18

	// unwind our own stack pointer
	add sp, x19, #(IFRAME_sizeof)
.else
	// the iframe already holds the original sp (stored with the iframe
	// size added back), so restore it directly
	mov sp, x18
.endif

	// finally restore remaining registers
	ldp x0,   x1, [x19, #(IFRAME_x + 0 * 8)]
	ldp x18, x19, [x19, #(IFRAME_x + 18 * 8)]

	eret
.endm

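// Define an exception handler stub: build the iframe, call the given C
// function with it, and return through the iframe.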
.macro EXCEPTION_HANDLER el name func
	STATIC_FUNCTION(handle_\name):
		EXCEPTION_ENTRY \el

		// align sp down to 16 bytes, as the AAPCS64 requires for the
		// call into C code
		and sp, x19, #0xfffffffffffffff0

		// call the C handler, passing the IFRAME in x0;
		// the handler may re-enable interrupts if it wants to
		mov x0, x19
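		// also keep the iframe pointer in the frame pointer, presumably
		// so stack unwinding can find the interrupted context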
		mov x29, x0
		bl \func

		EXCEPTION_RETURN \el
	FUNCTION_END(handle_\name)
.endm

.macro	vector	name
	.align 7
	b	handle_\name
.endm

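// unexpected vector: break into the debugger, then spin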
.macro	vempty
	.align 7
	brk	0xfff
	1: b	1b
.endm

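// VBAR_EL1 requires the vector table to be 2 KiB aligned (.align 11);
// each of the 16 entries below is 128 bytes (.align 7)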
.align 11
.globl _exception_vectors
_exception_vectors:
	vempty             /* Synchronous EL1t */
	vempty             /* IRQ EL1t */
	vempty             /* FIQ EL1t */
	vempty             /* Error EL1t */

	vector el1h_sync   /* Synchronous EL1h */
	vector el1h_irq    /* IRQ EL1h */
	vector el1h_fiq    /* FIQ EL1h */
	vector el1h_error  /* Error EL1h */

	vector el0_sync    /* Synchronous 64-bit EL0 */
	vector el0_irq     /* IRQ 64-bit EL0 */
	vector el0_fiq     /* FIQ 64-bit EL0 */
	vector el0_error   /* Error 64-bit EL0 */

	vempty             /* Synchronous 32-bit EL0 */
	vempty             /* IRQ 32-bit EL0 */
	vempty             /* FIQ 32-bit EL0 */
	vempty             /* Error 32-bit EL0 */

EXCEPTION_HANDLER 1 el1h_sync do_sync_handler
EXCEPTION_HANDLER 1 el1h_irq do_irq_handler
EXCEPTION_HANDLER 1 el1h_fiq do_fiq_handler
EXCEPTION_HANDLER 1 el1h_error do_error_handler

EXCEPTION_HANDLER 0 el0_sync do_sync_handler
EXCEPTION_HANDLER 0 el0_irq do_irq_handler
EXCEPTION_HANDLER 0 el0_fiq do_fiq_handler
EXCEPTION_HANDLER 0 el0_error do_error_handler

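// return to EL0 via the iframe passed in x0, first scrubbing the
// callee-saved registers so stale kernel values don't leak to userspace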
FUNCTION(_eret_with_iframe):
	mov x20, xzr
	mov x21, xzr
	mov x22, xzr
	mov x23, xzr
	mov x24, xzr
	mov x25, xzr
	mov x26, xzr
	mov x27, xzr
	mov x28, xzr
	mov x29, xzr

	mov x19, x0
	EXCEPTION_RETURN 0
FUNCTION_END(_eret_with_iframe)

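// save the full SIMD/FP state (q0-q31 plus FPSR and FPCR) to the buffer in x0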
FUNCTION(_fp_save):
	stp q0, q1, [x0], #32
	stp q2, q3, [x0], #32
	stp q4, q5, [x0], #32
	stp q6, q7, [x0], #32
	stp q8, q9, [x0], #32
	stp q10, q11, [x0], #32
	stp q12, q13, [x0], #32
	stp q14, q15, [x0], #32
	stp q16, q17, [x0], #32
	stp q18, q19, [x0], #32
	stp q20, q21, [x0], #32
	stp q22, q23, [x0], #32
	stp q24, q25, [x0], #32
	stp q26, q27, [x0], #32
	stp q28, q29, [x0], #32
	stp q30, q31, [x0], #32
	mrs x1, FPSR
	mrs x2, FPCR
	str x1, [x0], #8
	str x2, [x0], #8

	// reset FPSR and FPCR so userspace state cannot affect the kernel;
	// skip the FPCR write when it already holds zero, since writing
	// FPCR can be comparatively expensive
	msr FPSR, xzr
	cmp x2, xzr
	beq 1f
	msr FPCR, xzr
1:
	ret
FUNCTION_END(_fp_save)

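// restore the SIMD/FP state previously saved by _fp_save from the buffer
// in x0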
FUNCTION(_fp_restore):
	ldp q0, q1, [x0], #32
	ldp q2, q3, [x0], #32
	ldp q4, q5, [x0], #32
	ldp q6, q7, [x0], #32
	ldp q8, q9, [x0], #32
	ldp q10, q11, [x0], #32
	ldp q12, q13, [x0], #32
	ldp q14, q15, [x0], #32
	ldp q16, q17, [x0], #32
	ldp q18, q19, [x0], #32
	ldp q20, q21, [x0], #32
	ldp q22, q23, [x0], #32
	ldp q24, q25, [x0], #32
	ldp q26, q27, [x0], #32
	ldp q28, q29, [x0], #32
	ldp q30, q31, [x0], #32

	ldr x1, [x0], #8
	msr FPSR, x1

	// only write FPCR when the saved value differs from the current one,
	// since the write can be comparatively expensive
	ldr x2, [x0], #8
	mrs x3, FPCR
	cmp x3, x2
	beq 1f
	msr FPCR, x2
1:
	ret
FUNCTION_END(_fp_restore)

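// switch from the context saved through x0 to the one loaded through x1;
// only the callee-saved state (x19-x30, sp, TPIDR_EL0, d8-d15) needs to be
// swapped, as the rest is caller-saved under the calling convention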
FUNCTION(_arch_context_swap):
	// save
	stp x19, x20, [x0], #16
	stp x21, x22, [x0], #16
	stp x23, x24, [x0], #16
	stp x25, x26, [x0], #16
	stp x27, x28, [x0], #16
	stp x29, x30, [x0], #16

	mov x2, sp
	mrs x3, TPIDR_EL0
	stp  x2,  x3, [x0], #16

	stp  d8,  d9, [x0], #16
	stp d10, d11, [x0], #16
	stp d12, d13, [x0], #16
	stp d14, d15, [x0], #16

	// restore
	ldp x19, x20, [x1], #16
	ldp x21, x22, [x1], #16
	ldp x23, x24, [x1], #16
	ldp x25, x26, [x1], #16
	ldp x27, x28, [x1], #16
	ldp x29, x30, [x1], #16

	ldp  x2,  x3, [x1], #16
	mov sp, x2
	msr TPIDR_EL0, x3

	ldp  d8,  d9, [x1], #16
	ldp d10, d11, [x1], #16
	ldp d12, d13, [x1], #16
	ldp d14, d15, [x1], #16

	// pass x29 as argument to thread entry function
	mov x0, x29
	ret
FUNCTION_END(_arch_context_swap)

/*!	\fn void arch_debug_call_with_fault_handler(cpu_ent* cpu,
		jmp_buf jumpBuffer, void (*function)(void*), void* parameter)

	Called by debug_call_with_fault_handler() to do the dirty work of setting
	the fault handler and calling the function. If the function causes a page
	fault, arch_debug_call_with_fault_handler() calls longjmp() with the
	given \a jumpBuffer. Otherwise it returns normally.

	debug_call_with_fault_handler() has already saved the CPU's fault_handler
	and fault_handler_stack_pointer and will reset them later, so
	arch_debug_call_with_fault_handler() doesn't need to care about them.

	\param cpu The \c cpu_ent for the current CPU.
	\param jumpBuffer Buffer to be used for longjmp().
	\param function The function to be called.
	\param parameter The parameter to be passed to the function to be called.
*/
FUNCTION(arch_debug_call_with_fault_handler):
	// install the fault handler and the jump buffer (as its "stack
	// pointer") in the per-CPU structure
	ldr x4, =fault
	str x4, [x0, #CPU_ENT_fault_handler]
	str x1, [x0, #CPU_ENT_fault_handler_stack_pointer]

	// tail-call function(parameter)
	mov x0, x3
	br x2

fault:
	// the fault handling code has set sp to the jump buffer;
	// longjmp(jumpBuffer, 1)
	mov x0, sp
	mov x1, #1
	b longjmp
FUNCTION_END(arch_debug_call_with_fault_handler)


/* addr_t arm64_get_fp(void) */
FUNCTION(arm64_get_fp):
	mov x0, x29
	ret
FUNCTION_END(arm64_get_fp)
