1/*
2 * Copyright 2020, Data61, CSIRO (ABN 41 687 119 230)
3 *
4 * SPDX-License-Identifier: GPL-2.0-only
5 */
6
7#include <config.h>
8#include <machine/assembler.h>
9#include <arch/api/syscall.h>
10#include <arch/machine/hardware.h>
11#include <arch/machine/registerset.h>
12
/* VM fault event codes (presumably matching the VM fault message protocol;
 * not referenced in the portion of the file visible here — TODO confirm use). */
#define VM_EVENT_DATA_ABORT 0
#define VM_EVENT_PREFETCH_ABORT 1

#ifdef CONFIG_ARM_HYPERVISOR_SUPPORT

/* With hypervisor support the kernel runs in EL2, so exceptions use the
 * EL2-banked syndrome/return/status registers. */
#define ELR     elr_el2
#define ESR     esr_el2
#define SPSR    spsr_el2
#define TPIDR   tpidr_el2

#else

/* Without hypervisor support the kernel runs in EL1. */
#define ELR     elr_el1
#define ESR     esr_el1
#define SPSR    spsr_el1
#define TPIDR   tpidr_el1

#endif
31
32
/*
 * lsp_i: load the kernel stack pointer into sp from TPIDR, using \_tmp as
 * scratch (sp cannot be the destination of an mrs).
 */
.macro lsp_i _tmp
    mrs     \_tmp, TPIDR
#if CONFIG_MAX_NUM_NODES > 1
    /* NOTE(review): on SMP configurations the low 12 bits of TPIDR appear to
     * carry per-core information; mask them off to recover the stack pointer
     * value — confirm against the code that writes TPIDR. */
    bic     \_tmp, \_tmp, #0xfff
#endif
    mov     sp, \_tmp
.endm
40
/*
 * ventry: emit one exception-vector entry — a single branch, aligned to
 * 2^7 = 128 bytes to match the architectural 0x80-byte spacing of AArch64
 * exception vectors.
 */
.macro ventry label
.align 7
    b       \label
.endm
45
46.section .vectors
47
/*
 * AArch64 exception vector table: four groups of four entries
 * (Synchronous, IRQ, FIQ, SError), taken respectively
 *   1) from the current EL using SP_EL0 (never expected — invalid),
 *   2) from the current EL using SP_ELx,
 *   3) from a lower EL running AArch64,
 *   4) from a lower EL running AArch32 (unsupported — invalid).
 * Each entry occupies 128 bytes (see ventry above).
 */
BEGIN_FUNC(arm_vector_table)
    ventry  invalid_vector_entry           // Synchronous EL1t/EL2t
    ventry  invalid_vector_entry           // IRQ EL1t/EL2t
    ventry  invalid_vector_entry           // FIQ EL1t/EL2t
    ventry  invalid_vector_entry           // SError EL1t/EL2t

    ventry  cur_el_sync                    // Current EL Synchronous (EL1/2)
    ventry  cur_el_irq                     // IRQ
    ventry  invalid_vector_entry           // FIQ
    ventry  cur_el_serr                    // SError

    ventry  lower_el_sync                  // Synchronous 64-bit EL0/EL1
    ventry  lower_el_irq                   // IRQ 64-bit EL0/EL1
    ventry  invalid_vector_entry           // FIQ 64-bit EL0/EL1
    ventry  lower_el_serr                  // SError 64-bit EL0/EL1

    ventry  invalid_vector_entry           // Synchronous 32-bit EL0/EL1
    ventry  invalid_vector_entry           // IRQ 32-bit EL0/EL1
    ventry  invalid_vector_entry           // FIQ 32-bit EL0/EL1
    ventry  invalid_vector_entry           // SError 32-bit EL0/EL1
END_FUNC(arm_vector_table)
69
70.section .vectors.text
71
/*
 * kernel_enter: save the trapped thread's register context (x0-x30, user
 * sp, exception return address, and saved program status) into the frame
 * that sp points at on kernel entry, at the PT_* offsets defined in
 * registerset.h.
 */
.macro kernel_enter
    /* Storing thread's stack frame */
    stp     x0,  x1,  [sp, #16 * 0]
    stp     x2,  x3,  [sp, #16 * 1]
    stp     x4,  x5,  [sp, #16 * 2]
    stp     x6,  x7,  [sp, #16 * 3]
    stp     x8,  x9,  [sp, #16 * 4]
    stp     x10, x11, [sp, #16 * 5]
    stp     x12, x13, [sp, #16 * 6]
    stp     x14, x15, [sp, #16 * 7]
    stp     x16, x17, [sp, #16 * 8]
    stp     x18, x19, [sp, #16 * 9]
    stp     x20, x21, [sp, #16 * 10]
    stp     x22, x23, [sp, #16 * 11]
    stp     x24, x25, [sp, #16 * 12]
    stp     x26, x27, [sp, #16 * 13]
    stp     x28, x29, [sp, #16 * 14]

    /* Store thread's SPSR, LR, and SP */
    mrs     x21, sp_el0                    // thread's user stack pointer
    mrs     x22, ELR                       // exception return address
    mrs     x23, SPSR                      // saved program status
    stp     x30, x21, [sp, #PT_LR]
    stp     x22, x23, [sp, #PT_ELR_EL1]
.endm
97
/*
 * Catch-all for exception vectors the kernel never expects to take:
 * switch to the kernel stack and halt.
 */
BEGIN_FUNC(invalid_vector_entry)
    lsp_i   x19
    b       halt
END_FUNC(invalid_vector_entry)
102
/*
 * Synchronous exception taken from the kernel itself. Any such exception is
 * fatal: decode the exception class from ESR and, on debug builds, report
 * kernel data/prefetch aborts before halting; every other class is treated
 * as an invalid vector.
 */
BEGIN_FUNC(cur_el_sync)
    lsp_i   x19
    /* Read esr and branch to respective labels */
    mrs     x25, ESR
    lsr     x24, x25, #ESR_EC_SHIFT        // x24 = exception class (EC) field
    cmp     x24, #ESR_EC_CEL_DABT
    b.eq    cur_el_da
    cmp     x24, #ESR_EC_CEL_IABT
    b.eq    cur_el_ia
    b       cur_el_inv

cur_el_da:                                 // data abort inside the kernel
#ifdef CONFIG_DEBUG_BUILD
    mrs     x0, ELR                        // faulting PC as first C argument
    bl      kernelDataAbort
#endif /* CONFIG_DEBUG_BUILD */
    b       halt

cur_el_ia:                                 // instruction abort inside the kernel
#ifdef CONFIG_DEBUG_BUILD
    mrs     x0, ELR                        // faulting PC as first C argument
    bl      kernelPrefetchAbort
#endif /* CONFIG_DEBUG_BUILD */
    b       halt

cur_el_inv:                                // any other exception class
    b       invalid_vector_entry
END_FUNC(cur_el_sync)
131
/*
 * This is only reached if ksCurThread is the idle thread.
 *
 * As long as the idle thread is stateless there is no need to save its
 * register state here: we simply call c_handle_interrupt, which activates
 * ksCurThread when returning from the interrupt.
 */
/* IRQ taken while in the kernel (idle): no context save, straight to C. */
BEGIN_FUNC(cur_el_irq)
    lsp_i   x19                            // switch to the kernel stack
    b       c_handle_interrupt
END_FUNC(cur_el_irq)
142
/*
 * SError taken from the kernel. On TX2 the SError is ignored and execution
 * resumes (NOTE(review): presumably a workaround for spurious SErrors on
 * that platform — confirm against the platform errata); elsewhere it is
 * fatal.
 */
BEGIN_FUNC(cur_el_serr)
#ifdef CONFIG_PLAT_TX2
    eret
#else
    b       invalid_vector_entry
#endif
END_FUNC(cur_el_serr)
150
/*
 * Synchronous exception from a lower EL running AArch64 (user level, or a
 * guest when hypervisor support is enabled): save the thread's context,
 * decode the exception class from ESR, and dispatch to the matching C
 * handler on the kernel stack.
 */
BEGIN_FUNC(lower_el_sync)
    kernel_enter

    /* Read esr and branch to respective labels */
    mrs     x25, ESR
    lsr     x24, x25, #ESR_EC_SHIFT        // x24 = exception class (EC) field
    cmp     x24, #ESR_EC_LEL_DABT
    b.eq    lel_da
    cmp     x24, #ESR_EC_LEL_IABT
    b.eq    lel_ia
    cmp     x24, #ESR_EC_LEL_SVC64
    b.eq    lel_syscall
#ifdef CONFIG_ARM_HYPERVISOR_SUPPORT
    /* Guest hypercalls (HVC) are handled like SVC syscalls. */
    cmp     x24, #ESR_EC_LEL_HVC64
    b.eq    lel_syscall
    /* Any other exception class from a guest is handled as a VCPU fault. */
    mrs     x20, ELR
    str     x20, [sp, #PT_FaultIP]

    lsp_i   x19
    /* move the ESR as the input */
    mov     x0, x25
    b       c_handle_vcpu_fault
#else
    cmp     x24, #ESR_EL1_EC_ENFP
    b.eq    el0_enfp
    b       el0_user
#endif

lel_da:                                    // data abort from user level
    mrs     x20, ELR
    str     x20, [sp, #PT_FaultIP]

    lsp_i   x19
    b       c_handle_data_fault

lel_ia:                                    // instruction abort from user level
    mrs     x20, ELR
    str     x20, [sp, #PT_FaultIP]

    lsp_i   x19
    b       c_handle_instruction_fault

lel_syscall:
    /* ELR holds the address of the instruction after the SVC/HVC; store
     * the address of the trapping instruction itself so the syscall can
     * be restarted. */
    mrs     x20, ELR
    sub     x20, x20, #4
    str     x20, [sp, #PT_FaultIP]

    lsp_i   x19

#ifdef CONFIG_FASTPATH
    /* x7 carries the syscall number; Call and ReplyRecv take the fastpath. */
    cmp     x7, #SYSCALL_CALL
    b.eq    c_handle_fastpath_call
    cmp     x7, #SYSCALL_REPLY_RECV
#ifdef CONFIG_KERNEL_MCS
    /* Second C argument for the fastpath; this mov does not affect the
     * flags set by the cmp above. NOTE(review): x6 presumably holds the
     * reply object under MCS — confirm against registerset.h. */
    mov     x2, x6
#endif
    b.eq    c_handle_fastpath_reply_recv
#endif

    mov     x2, x7                         // pass the syscall number to C
    b       c_handle_syscall

el0_enfp:                                  // FPU access trap from EL0
#ifdef CONFIG_HAVE_FPU
    lsp_i   x19
    b       c_handle_enfp
#endif /* CONFIG_HAVE_FPU */
    /* Without FPU support, an FP access falls through and is reported as
     * an undefined instruction. */

el0_user:                                  // any other EL0 exception class
    mrs     x20, ELR
    str     x20, [sp, #PT_FaultIP]

    lsp_i   x19
    b       c_handle_undefined_instruction
END_FUNC(lower_el_sync)
226
/*
 * IRQ from user level: save the thread's context and interrupted PC, then
 * hand over to the C interrupt handler on the kernel stack.
 */
BEGIN_FUNC(lower_el_irq)
    kernel_enter
    mrs     x20, ELR                       // interrupted PC, for restart
    str     x20, [sp, #PT_FaultIP]

    lsp_i   x19
    b       c_handle_interrupt
END_FUNC(lower_el_irq)
235
/*
 * SError from a lower EL. On TX2 the SError is ignored and the thread is
 * resumed (NOTE(review): presumably the same spurious-SError workaround as
 * cur_el_serr — confirm against the platform errata); elsewhere it is fatal.
 */
BEGIN_FUNC(lower_el_serr)
#ifdef CONFIG_PLAT_TX2
    eret
#else
    b       invalid_vector_entry
#endif
END_FUNC(lower_el_serr)
243