1/*
 * Copyright 2013, François Revol <revol@free.fr>.
3 * All rights reserved. Distributed under the terms of the MIT License.
4 *
5 * Copyright 2006, Ingo Weinhold <bonefish@cs.tu-berlin.de>.
6 * All rights reserved. Distributed under the terms of the MIT License.
7 *
8 * Copyright 2003, Travis Geiselbrecht. All rights reserved.
9 * Distributed under the terms of the NewOS License.
10 */
11#include <asm_defs.h>
12
13/*	General exception handling concept:
14
15	The PPC architecture specifies entry point offsets for the various
16	exceptions in the first two physical pages. We put a short piece of code
17	(VEC_ENTRY()) into each exception vector. It calls exception_vector_common,
18	which is defined in the unused space at the beginning of the first physical
	page. It re-enables address translation and calls ppc_440_exception_tail
	which lies in the kernel. It dumps an iframe and invokes ppc_exception_entry()
21	(arch_int.cpp), which handles the exception and returns eventually.
22	The registers are restored from the iframe and we return from the
23	interrupt.
24
25	algorithm overview:
26
27	* VEC_ENTRY
28	* ppc_440_exception_vector_common
29	* ppc_440_exception_tail
30		- dump iframe
31		- ppc_exception_entry()
32		- restore registers and return from interrupt
33
34	Here we use the following SPRG registers, which are at the disposal of the
35	operating system:
36	* SPRG0: Physical address pointer to a struct cpu_exception_context
37			 for the current CPU. The structure contains helpful pointers
38			 as well as some scratch memory for temporarily saving registers.
39	* SPRG1: Scratch.
40
41	struct cpu_exception_context (defined in arch_int.h):
42	offset 0:  virtual address of the exception handler routine in the kernel
43	offset 4:  virtual address of the exception context
44	offset 8:  kernel stack for the current thread
45	offset 12: start of scratch memory for saving registers etc.
46
47	algorithm in detail:
48
49	* VEC_ENTRY
50		- save r1 in SPRG1 and load cpu_exception_context into r1
51		- save r0, save LR in r0
52	* ppc_440_exception_vector_common
53		- params:
54			. r0: old LR
55			. r1: exception context (physical address)
56			. SPRG1: original r1
57		- save r0-3
58		- load virtual exception context address to r1
59		- turn on BAT for exception vector code
60		- turn on address translation
61		- get exception vector offset from LR
62	* ppc_440_exception_tail
63		- params:
64			. r1: exception context (virtual address)
65			. r3: exception vector offset
66			. SPRG1: original r1
67		- turn off BAT
68		- get kernel stack pointer
69		- dump iframe
70		- ppc_exception_entry()
71		- restore registers and return from interrupt
72 */
73
74
75/* exception vector definitions */
76
/* Code placed in each exception vector (exactly 5 instructions -- the
   "subi %r3, %r3, 20" in ppc_440_exception_vector_common relies on this count
   to recover the vector's start address from LR).
   Runs with address translation disabled. Only r0 and r1 are touched, and
   both are preserved: r1 in SPRG1, r0 in the context's scratch memory at
   16(%r1). */
#define VEC_ENTRY() \
	mtsprg1	%r1					; /* temporarily save r1 in SPRG1 */		\
	mfsprg0	%r1					; /* ppc_cpu_exception_context* -> r1 */	\
	stw		%r0, 16(%r1)		; /* save r0 in scratch memory */			\
	mflr	%r0					; /* save LR in r0 */						\
	bl		ppc_440_exception_vector_common	; /* continue with the common part */
84
/* Defines one exception vector: pads with .skip so the entry stub starts at
   `offset` bytes past __440_irqvec_start (matching the architectural vector
   offset), then emits the VEC_ENTRY() stub under the symbol ppc_440_<name>. */
#define DEFINE_VECTOR(offset, name) 	\
.skip	offset - (. - __440_irqvec_start);	\
FUNCTION(ppc_440_##name):							\
	VEC_ENTRY()
90
91
/* Start of the exception vector code image (ends at __440_irqvec_end).
   Offsets within this region correspond to the architectural exception vector
   offsets used by DEFINE_VECTOR() below. NOTE(review): the image is presumably
   copied/mapped to the physical exception vector area during boot -- confirm
   against the platform initialization code. */
.global __440_irqvec_start
__440_irqvec_start:
	.long	0
95
/* Called by the exception vector code (VEC_ENTRY).
 * LR:    Points to the end of the exception vector code we're coming from
 *        (i.e. 5 instructions / 20 bytes past its start).
 * r0:    original LR
 * r1:    ppc_cpu_exception_context* (physical address)
 * SPRG1: original r1
 *
 * Still executes with address translation disabled. Saves r0-r3 and the
 * original LR into the context's scratch memory, re-enables translation,
 * and branches to the kernel handler whose virtual address is stored at
 * offset 0 of the context.
 */
ppc_440_exception_vector_common:
	stw		%r0, 20(%r1)			/* save original LR */
	stw		%r2, 24(%r1)			/* save r2 */
	stw		%r3, 28(%r1)			/* save r3 */

	/* load the virtual address of the ppc_cpu_exception_context for this CPU
	   (offset 4 of the structure, cf. the overview comment at the top) */
	lwz		%r1, 4(%r1)

	/* Address translation is turned off. We map this code via BAT, turn on
	   address translation, and continue in the kernel proper.
	   NOTE(review): mtibatu/mtibatl are classic-PPC (60x/7xx) BAT registers;
	   the PPC 440 core has no BATs and uses a software-managed TLB instead --
	   confirm this path is actually correct for the 440 port. */
	li		%r0, 0x10|0x2			/* BATL_MC | BATL_PP_RW */
	mtibatl	0, %r0					/* load lower word of the instruction BAT */
	li		%r0, 0x2				/* BEPI = 0, BL = 0 (128 KB), BATU_VS */
	mtibatu	0, %r0					/* load upper word of the instruction BAT */
	isync
	sync

	/* Turn on address translation: copy the interrupted context's MMU bits
	   (from the MSR image saved in SRR1) into the current MSR, and enable the
	   FPU as well (the iframe code below saves FP registers). */
	mfsrr1	%r0						/* load saved msr */
	rlwinm	%r0, %r0, 28, 30, 31	/* extract mmu bits */
	mfmsr	%r3						/* load the current msr */
	rlwimi  %r3, %r0, 4, 26, 27		/* merge the mmu bits with the current msr */
	li		%r0, 1
	rlwimi  %r3, %r0, 13, 18, 18	/* turn on FPU, too */
	mtmsr	%r3						/* load new msr (turning the mmu back on) */
	isync

	/* Get LR -- it points to the end of the exception vector code. We adjust it
	   to point to the beginning and can use it to identify the vector later. */
	mflr	%r3
	subi	%r3, %r3, 20		/* 5 instructions of 4 bytes each */

	/* jump to kernel code: offset 0 of the exception context holds the virtual
	   address of the exception handler routine (ppc_440_exception_tail) */
	lwz		%r2, 0(%r1)
	mtlr	%r2
	blr
138
139
/* The exception vectors proper. Each DEFINE_VECTOR() pads up to the given
   architectural offset (relative to __440_irqvec_start) and drops a
   VEC_ENTRY() stub there.
   NOTE(review): these offsets and names follow the classic-PPC (60x/7xx)
   interrupt layout, not the 440's critical/non-critical interrupt scheme --
   verify against the 440 core documentation. */
DEFINE_VECTOR(0x100, system_reset_exception)
DEFINE_VECTOR(0x200, machine_check_exception)
DEFINE_VECTOR(0x300, DSI_exception)
DEFINE_VECTOR(0x400, ISI_exception)
DEFINE_VECTOR(0x500, external_interrupt_exception)
DEFINE_VECTOR(0x600, alignment_exception)
DEFINE_VECTOR(0x700, program_exception)
DEFINE_VECTOR(0x800, FP_unavailable_exception)
DEFINE_VECTOR(0x900, decrementer_exception)
DEFINE_VECTOR(0xc00, system_call_exception)
DEFINE_VECTOR(0xd00, trace_exception)
DEFINE_VECTOR(0xe00, FP_assist_exception)
DEFINE_VECTOR(0xf00, perf_monitor_exception)
DEFINE_VECTOR(0xf20, altivec_unavailable_exception)
DEFINE_VECTOR(0x1000, ITLB_miss_exception)
DEFINE_VECTOR(0x1100, DTLB_miss_on_load_exception)
DEFINE_VECTOR(0x1200, DTLB_miss_on_store_exception)
DEFINE_VECTOR(0x1300, instruction_address_breakpoint_exception)
DEFINE_VECTOR(0x1400, system_management_exception)
DEFINE_VECTOR(0x1600, altivec_assist_exception)
DEFINE_VECTOR(0x1700, thermal_management_exception)

/* end of the exception vector code image */
.global __440_irqvec_end
__440_irqvec_end:
164
165
/* This is where ppc_440_exception_vector_common continues. We're in the kernel
   here, with address translation enabled.
   r1:    ppc_cpu_exception_context* (virtual address)
   r3:    exception vector offset
   SPRG1: original r1
   Selects the proper kernel stack, dumps an iframe onto it, calls the C
   handler ppc_exception_entry(), and finally restores all registers and
   returns from the interrupt.
 */
FUNCTION(ppc_440_exception_tail):
	/* turn off the BAT the vector code installed */
	li		%r2, 0
	mtibatu	0, %r2
	mtibatl	0, %r2
	isync
	sync

	/* save CR -- the compare below clobbers it */
	mfcr	%r0

	mfsrr1	%r2					/* load saved msr */
	andi.	%r2, %r2, (1 << 14)	/* test MSR[PR] -- set when we come from userland */
	beq		.kernel				/* PR clear -> we were in kernel mode */

	/* We come from userland. Load the kernel stack top address for the current
	   userland thread (offset 8 of the exception context). */
	mr		%r2, %r1
	lwz		%r1, 8(%r1)
	b		.restore_stack_end

.kernel:
	/* we come from kernel mode: continue on the interrupted kernel stack */
	mr		%r2, %r1
	mfsprg1	%r1

.restore_stack_end:
	/* now r2 points to the ppc_cpu_exception_context, r1 to the kernel stack */
	/* restore the CR, it was messed up in the previous compare */
	mtcrf	0xff, %r0

	/* align r1 to 8 bytes, so the iframe will be aligned too */
	rlwinm	%r1, %r1, 0, 0, 28

	/* push a full iframe onto the stack */
	bl		__440_save_regs

	/* iframe pointer to r4 (second C argument) and a backup to r20, which is
	   callee-saved and thus survives the C call below */
	mr		%r4, %r1
	mr		%r20, %r1

	/* adjust the stack pointer for ABI compatibility */
	subi	%r1, %r1, 8				/* make sure there's space for the previous
									   frame pointer and the return address */
	rlwinm	%r1, %r1, 0, 0, 27		/* 16 byte align the stack pointer */
	li		%r0, 0
	stw		%r0, 0(%r1)				/* previous frame pointer: NULL */
		/* 4(%r1) is room for the return address to be filled in by the
		   called function. */

	/* call the C handler (arch_int.cpp):
	   r3: exception vector offset
	   r4: iframe pointer */
	bl 		ppc_exception_entry

	/* move the iframe (backed up in r20) to r1 */
	mr		%r1, %r20

	b		__440_restore_regs_and_rfi
228
229
/* called by ppc_440_exception_tail
 * Pushes a complete iframe onto the stack pointed to by r1 and returns the
 * new stack pointer (= iframe pointer) in r1.
 * register expectations:
 *  r1:        stack (8 byte aligned; the iframe grows downward from here)
 *  r2:        ppc_cpu_exception_context*
 *  SPRG1:     original r1
 *  r0,r3, LR: scrambled, but saved in scratch memory
 * all other regs should have been unmodified by the exception handler,
 * and ready to be saved
 *
 * Resulting iframe layout, from the final r1 upward:
 *   vector offset, SRR0, SRR1, DAR, DSISR, LR, CR, XER, CTR, FPSCR,
 *   r31..r4, r3..r0 (from scratch memory / SPRG1), f31..f0
 */
__440_save_regs:
	/* Note: The iframe must be 8 byte aligned. The stack pointer we are passed
	   in r1 is aligned. So we store the floating point registers first and
	   need to take care that an even number of 4 byte registers is stored,
	   or insert padding respectively. */

	/* push f0-f31 */
	stfdu	%f0, -8(%r1)
	stfdu	%f1, -8(%r1)
	stfdu	%f2, -8(%r1)
	stfdu	%f3, -8(%r1)
	stfdu	%f4, -8(%r1)
	stfdu	%f5, -8(%r1)
	stfdu	%f6, -8(%r1)
	stfdu	%f7, -8(%r1)
	stfdu	%f8, -8(%r1)
	stfdu	%f9, -8(%r1)
	stfdu	%f10, -8(%r1)
	stfdu	%f11, -8(%r1)
	stfdu	%f12, -8(%r1)
	stfdu	%f13, -8(%r1)
	stfdu	%f14, -8(%r1)
	stfdu	%f15, -8(%r1)
	stfdu	%f16, -8(%r1)
	stfdu	%f17, -8(%r1)
	stfdu	%f18, -8(%r1)
	stfdu	%f19, -8(%r1)
	stfdu	%f20, -8(%r1)
	stfdu	%f21, -8(%r1)
	stfdu	%f22, -8(%r1)
	stfdu	%f23, -8(%r1)
	stfdu	%f24, -8(%r1)
	stfdu	%f25, -8(%r1)
	stfdu	%f26, -8(%r1)
	stfdu	%f27, -8(%r1)
	stfdu	%f28, -8(%r1)
	stfdu	%f29, -8(%r1)
	stfdu	%f30, -8(%r1)
	stfdu	%f31, -8(%r1)

	/* push r0-r3 -- these were clobbered by the vector code, so their
	   original values come from the context's scratch memory and SPRG1 */
	lwz		%r0, 16(%r2)		/* original r0 */
	stwu	%r0, -4(%r1)		/* push r0 */
	mfsprg1	%r0					/* original r1 */
	stwu	%r0, -4(%r1)		/* push r1 */
	lwz		%r0, 24(%r2)		/* original r2 */
	stwu	%r0, -4(%r1)		/* push r2 */
	lwz		%r0, 28(%r2)		/* original r3 */
	stwu	%r0, -4(%r1)		/* push r3 */

	/* push r4-r31 -- still unmodified, store them directly */
	stwu	%r4, -4(%r1)
	stwu	%r5, -4(%r1)
	stwu	%r6, -4(%r1)
	stwu	%r7, -4(%r1)
	stwu	%r8, -4(%r1)
	stwu	%r9, -4(%r1)
	stwu	%r10, -4(%r1)
	stwu	%r11, -4(%r1)
	stwu	%r12, -4(%r1)
	stwu	%r13, -4(%r1)
	stwu	%r14, -4(%r1)
	stwu	%r15, -4(%r1)
	stwu	%r16, -4(%r1)
	stwu	%r17, -4(%r1)
	stwu	%r18, -4(%r1)
	stwu	%r19, -4(%r1)
	stwu	%r20, -4(%r1)
	stwu	%r21, -4(%r1)
	stwu	%r22, -4(%r1)
	stwu	%r23, -4(%r1)
	stwu	%r24, -4(%r1)
	stwu	%r25, -4(%r1)
	stwu	%r26, -4(%r1)
	stwu	%r27, -4(%r1)
	stwu	%r28, -4(%r1)
	stwu	%r29, -4(%r1)
	stwu	%r30, -4(%r1)
	stwu	%r31, -4(%r1)

	/* save some of the other regs */
	mffs	%f0
	/* NOTE(review): stfs performs a double->single conversion; the common
	   FPSCR save idiom is stfd + load of the low word -- verify this
	   round-trips correctly with the lfsu/mtfsf in the restore path. */
	stfsu	%f0, -4(%r1)		/* push FPSCR */
	mfctr	%r0
	stwu	%r0, -4(%r1)		/* push CTR */
	mfxer	%r0
	stwu	%r0, -4(%r1)		/* push XER */
	mfcr	%r0
	stwu	%r0, -4(%r1)		/* push CR */
	lwz		%r0, 20(%r2)		/* original LR (saved by the vector code) */
	stwu	%r0, -4(%r1)		/* push LR */
	mfspr	%r0, %dsisr
	stwu	%r0, -4(%r1)		/* push DSISR */
	mfspr	%r0, %dar
	stwu	%r0, -4(%r1)		/* push DAR */
	mfspr	%r0, %srr1
	stwu	%r0, -4(%r1)		/* push SRR1 (interrupted MSR image) */
	mfspr	%r0, %srr0
	stwu	%r0, -4(%r1)		/* push SRR0 (interrupted PC) */

	stwu	%r3, -4(%r1)		/* exception vector offset */

	blr
342
343
/* called at the tail end of each of the exceptions
 * r1: iframe pointer
 * Walks the iframe upward with pre-incrementing loads (lwzu/lfdu), restoring
 * the special registers first, then r31..r3, then the FP registers, and r2,
 * r0, r1 last (r1 last of all, since it is the cursor into the iframe).
 * Finally returns from the interrupt via rfi, which reloads MSR/PC from
 * SRR1/SRR0.
 */
__440_restore_regs_and_rfi:
	lwzu	%r0, 4(%r1)		/* SRR0 (skip vector offset) */
	mtspr	%srr0, %r0
	lwzu	%r0, 4(%r1)		/* SRR1 */
	mtspr	%srr1, %r0
	lwzu	%r0, 4(%r1)		/* DAR */
	mtspr	%dar, %r0
	lwzu	%r0, 4(%r1)		/* DSISR */
	mtspr	%dsisr, %r0
	lwzu	%r0, 4(%r1)		/* LR */
	mtlr	%r0
	lwzu	%r0, 4(%r1)		/* CR */
	mtcr	%r0
	lwzu	%r0, 4(%r1)		/* XER */
	mtxer	%r0
	lwzu	%r0, 4(%r1)		/* CTR */
	mtctr	%r0
	lfsu	%f0, 4(%r1)		/* FPSCR */
	mtfsf	0xff, %f0

	/* r31..r3, in reverse order of how __440_save_regs pushed them */
	lwzu	%r31, 4(%r1)
	lwzu	%r30, 4(%r1)
	lwzu	%r29, 4(%r1)
	lwzu	%r28, 4(%r1)
	lwzu	%r27, 4(%r1)
	lwzu	%r26, 4(%r1)
	lwzu	%r25, 4(%r1)
	lwzu	%r24, 4(%r1)
	lwzu	%r23, 4(%r1)
	lwzu	%r22, 4(%r1)
	lwzu	%r21, 4(%r1)
	lwzu	%r20, 4(%r1)
	lwzu	%r19, 4(%r1)
	lwzu	%r18, 4(%r1)
	lwzu	%r17, 4(%r1)
	lwzu	%r16, 4(%r1)
	lwzu	%r15, 4(%r1)
	lwzu	%r14, 4(%r1)
	lwzu	%r13, 4(%r1)
	lwzu	%r12, 4(%r1)
	lwzu	%r11, 4(%r1)
	lwzu	%r10, 4(%r1)
	lwzu	%r9, 4(%r1)
	lwzu	%r8, 4(%r1)
	lwzu	%r7, 4(%r1)
	lwzu	%r6, 4(%r1)
	lwzu	%r5, 4(%r1)
	lwzu	%r4, 4(%r1)
	lwzu	%r3, 4(%r1)

	/* Stop here, before we overwrite r1, and continue with the floating point
	   registers first. r2 serves as the cursor for the FP area: +16 skips the
	   remaining r2, r1, r0 slots (and lands on f31). */
	addi	%r2, %r1, 16		/* skip r3-r0 */

	/* f31-f0 */
	lfd		%f31, 0(%r2)
	lfdu	%f30, 8(%r2)
	lfdu	%f29, 8(%r2)
	lfdu	%f28, 8(%r2)
	lfdu	%f27, 8(%r2)
	lfdu	%f26, 8(%r2)
	lfdu	%f25, 8(%r2)
	lfdu	%f24, 8(%r2)
	lfdu	%f23, 8(%r2)
	lfdu	%f22, 8(%r2)
	lfdu	%f21, 8(%r2)
	lfdu	%f20, 8(%r2)
	lfdu	%f19, 8(%r2)
	lfdu	%f18, 8(%r2)
	lfdu	%f17, 8(%r2)
	lfdu	%f16, 8(%r2)
	lfdu	%f15, 8(%r2)
	lfdu	%f14, 8(%r2)
	lfdu	%f13, 8(%r2)
	lfdu	%f12, 8(%r2)
	lfdu	%f11, 8(%r2)
	lfdu	%f10, 8(%r2)
	lfdu	%f9, 8(%r2)
	lfdu	%f8, 8(%r2)
	lfdu	%f7, 8(%r2)
	lfdu	%f6, 8(%r2)
	lfdu	%f5, 8(%r2)
	lfdu	%f4, 8(%r2)
	lfdu	%f3, 8(%r2)
	lfdu	%f2, 8(%r2)
	lfdu	%f1, 8(%r2)
	lfd		%f0, 8(%r2)

	/* r2-r0: restore r2 via the cursor, then load r0 and finally r1 itself
	   (plain lwz, since r1 must only change with its final value) */
	lwzu	%r2, 4(%r1)
	lwz		%r0, 8(%r1)
	lwz		%r1, 4(%r1)

	/* return from interrupt */
	rfi
442