/* $OpenBSD: trap_subr.S,v 1.19 2020/12/30 06:06:30 gkoehler Exp $ */
/* $NetBSD: trap_subr.S,v 1.20 2002/04/22 23:20:08 kleink Exp $	*/

/*-
 * Copyright (C) 1995, 1996 Wolfgang Solfrank.
 * Copyright (C) 1995, 1996 TooLs GmbH.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by TooLs GmbH.
 * 4. The name of TooLs GmbH may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY TOOLS GMBH ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL TOOLS GMBH BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "assym.h"

#include <machine/param.h>
#include <machine/psl.h>
#include <machine/trap.h>

#define SPR_VRSAVE	256

	.abiversion 2

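/*
 * SPRG0 is assumed to hold this CPU's cpu_info pointer; the CI_*
 * offsets used throughout index into that structure.
 */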
#define GET_CPUINFO(r) \
	mfsprg0  r

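/*
 * Compute the TOC base PC-relatively: the bl/mflr pair yields the
 * runtime address of label 99, to which the link-time offset
 * (.TOC. - 99b) is then added.
 */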
#define GET_TOCBASE(r) \
	bl	99f;							\
99:	mflr	r;							\
	addis	r, r, (.TOC. - 99b)@ha;					\
	addi	r, r, (.TOC. - 99b)@l;

/*
 * Restore SRs for a pmap
 *
 * Requires that r28-r31 be scratch, with r28 initialized to the SLB cache
 */

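/*
 * The cache is assumed to be an array of 16-byte slots: the SLBE
 * doubleword at offset 0 and the SLBV doubleword at offset 8.  A zero
 * SLBE terminates the list early.
 */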
restore_usersrs:
	GET_CPUINFO(%r28)
	ld	%r28, CI_USER_SLB_PA(%r28)
	li	%r29, 0			/* Set the counter to zero */

	/* Invalidate entire SLB */
	slbia
	slbmfee	%r31, %r29
	clrrdi	%r31, %r31, 28
	slbie	%r31

1:	ld	%r31, 0(%r28)		/* Load SLBE */
	cmpdi	%r31, 0			/* If SLBE is not valid, stop */
	beqlr
	ld	%r30, 8(%r28)		/* Load SLBV */
	slbmte	%r30, %r31		/* Install SLB entry */

	addi	%r28, %r28, 16		/* Advance pointer */
	addi	%r29, %r29, 1
	cmpdi	%r29, 32		/* Repeat if we are not at the end */
	blt	1b
	blr

restore_kernsrs:
	GET_CPUINFO(%r28)
	addi	%r28, %r28, CI_KERNEL_SLB

	li	%r29, 0			/* Set the counter to zero */

	/* Invalidate entire SLB */
	slbia
	slbmfee	%r31, %r29
	clrrdi	%r31, %r31, 28
	slbie	%r31

1:	ld	%r31, 0(%r28)		/* Load SLBE */
	cmpdi	%r31, 0			/* If SLBE is not valid, stop */
	beqlr
	ld	%r30, 8(%r28)		/* Load SLBV */
	slbmte	%r30, %r31		/* Install SLB entry */

	addi	%r28, %r28, 16		/* Advance pointer */
	addi	%r29, %r29, 1
	cmpdi	%r29, 31		/* Repeat if we are not at the end */
	blt	1b
	blr

/*
 * FRAME_SETUP assumes:
 *	SPRG1		SP (1)
 *	SPRG3		trap type
 *	savearea	r27-r31,DAR,DSISR   (DAR & DSISR only for DSI traps)
 *	r28		LR
 *	r29		CR
 *	r30		scratch
 *	r31		scratch
 *	r1		kernel stack
 *	SRR0/1		as at start of trap
 *
 * NOTE: SPRG1 is never used while the MMU is on, making it safe to reuse
 * in any real-mode fault handler, including those handling double faults.
 */
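/*
 * The 288 bytes skipped by the stdu below correspond to the ELFv2 ABI
 * red zone beneath the interrupted SP; the trapframe itself starts 32
 * bytes into the new frame, past the minimal ABI frame header.
 */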
#define	FRAME_SETUP(savearea)						\
/* Have to enable translation to allow access of kernel stack: */	\
	GET_CPUINFO(%r31);						\
	mfsrr0	%r30;							\
	std	%r30, (savearea+CPUSAVE_SRR0)(%r31);	/* save SRR0 */	\
	mfsrr1	%r30;							\
	std	%r30, (savearea+CPUSAVE_SRR1)(%r31);	/* save SRR1 */	\
	mfsprg1	%r31;			/* get saved SP (clears SPRG1) */ \
	mfmsr	%r30;							\
	ori	%r30, %r30, (PSL_DR|PSL_IR|PSL_RI)@l; /* relocation on */ \
	mtmsr	%r30;			/* stack can now be accessed */	\
	isync;								\
	stdu	%r31, -(FRAMELEN+288)(%r1); /* save it in the callframe */ \
	std	%r0, FRAME_0+32(%r1);	/* save r0 in the trapframe */	\
	std	%r31, FRAME_1+32(%r1);	/* save SP   "      "       */	\
	std	%r2, FRAME_2+32(%r1);	/* save r2   "      "       */	\
	std	%r28, FRAME_LR+32(%r1);	/* save LR   "      "       */	\
	std	%r29, FRAME_CR+32(%r1);	/* save CR   "      "       */	\
	GET_CPUINFO(%r2);						\
	ld	%r27, (savearea+CPUSAVE_R27)(%r2); /* get saved r27 */	\
	ld	%r28, (savearea+CPUSAVE_R28)(%r2); /* get saved r28 */	\
	ld	%r29, (savearea+CPUSAVE_R29)(%r2); /* get saved r29 */	\
	ld	%r30, (savearea+CPUSAVE_R30)(%r2); /* get saved r30 */	\
	ld	%r31, (savearea+CPUSAVE_R31)(%r2); /* get saved r31 */	\
	std	%r3, FRAME_3+32(%r1);	/* save r3-r31 */		\
	std	%r4, FRAME_4+32(%r1);					\
	std	%r5, FRAME_5+32(%r1);					\
	std	%r6, FRAME_6+32(%r1);					\
	std	%r7, FRAME_7+32(%r1);					\
	std	%r8, FRAME_8+32(%r1);					\
	std	%r9, FRAME_9+32(%r1);					\
	std	%r10, FRAME_10+32(%r1);					\
	std	%r11, FRAME_11+32(%r1);					\
	std	%r12, FRAME_12+32(%r1);					\
	std	%r13, FRAME_13+32(%r1);					\
	std	%r14, FRAME_14+32(%r1);					\
	std	%r15, FRAME_15+32(%r1);					\
	std	%r16, FRAME_16+32(%r1);					\
	std	%r17, FRAME_17+32(%r1);					\
	std	%r18, FRAME_18+32(%r1);					\
	std	%r19, FRAME_19+32(%r1);					\
	std	%r20, FRAME_20+32(%r1);					\
	std	%r21, FRAME_21+32(%r1);					\
	std	%r22, FRAME_22+32(%r1);					\
	std	%r23, FRAME_23+32(%r1);					\
	std	%r24, FRAME_24+32(%r1);					\
	std	%r25, FRAME_25+32(%r1);					\
	std	%r26, FRAME_26+32(%r1);					\
	std	%r27, FRAME_27+32(%r1);					\
	std	%r28, FRAME_28+32(%r1);					\
	std	%r29, FRAME_29+32(%r1);					\
	std	%r30, FRAME_30+32(%r1);					\
	std	%r31, FRAME_31+32(%r1);					\
	ld	%r28, (savearea+CPUSAVE_DAR)(%r2);  /* saved DAR */	\
	ld	%r29, (savearea+CPUSAVE_DSISR)(%r2);/* saved DSISR */	\
	ld	%r30, (savearea+CPUSAVE_SRR0)(%r2); /* saved SRR0 */	\
	ld	%r31, (savearea+CPUSAVE_SRR1)(%r2); /* saved SRR1 */	\
	mfxer	%r3;							\
	mfctr	%r4;							\
	mfsprg3	%r5;							\
	mfspr	%r6, SPR_VRSAVE;					\
	std	%r3, FRAME_XER+32(%r1);	/* save xer/ctr/exc */		\
	std	%r4, FRAME_CTR+32(%r1);					\
	std	%r5, FRAME_EXC+32(%r1);					\
	std	%r6, FRAME_VRSAVE+32(%r1);				\
	std	%r28, FRAME_DAR+32(%r1);				\
	std	%r29, FRAME_DSISR+32(%r1); /* save dsisr/srr0/srr1 */	\
	std	%r30, FRAME_SRR0+32(%r1);				\
	std	%r31, FRAME_SRR1+32(%r1);

#define FRAME_LEAVE(savearea)						\
/* Disable exceptions: */						\
	mfmsr	%r2;							\
	andi.	%r2,%r2,~PSL_EE@l;					\
	mtmsr	%r2;							\
	isync;								\
/* Now restore regs: */							\
	ld	%r2, FRAME_SRR0+32(%r1);				\
	ld	%r3, FRAME_SRR1+32(%r1);				\
	ld	%r4, FRAME_CTR+32(%r1);					\
	ld	%r5, FRAME_XER+32(%r1);					\
	ld	%r6, FRAME_LR+32(%r1);					\
	GET_CPUINFO(%r7);						\
	std	%r2, (savearea+CPUSAVE_SRR0)(%r7); /* save SRR0 */	\
	std	%r3, (savearea+CPUSAVE_SRR1)(%r7); /* save SRR1 */	\
	ld	%r7, FRAME_CR+32(%r1);					\
	ld	%r8, FRAME_VRSAVE+32(%r1);				\
	mtctr	%r4;							\
	mtxer	%r5;							\
	mtlr	%r6;							\
	mtsprg2	%r7;							\
	mtspr	SPR_VRSAVE, %r8;					\
	ld	%r31, FRAME_31+32(%r1);	/* restore r0-31 */		\
	ld	%r30, FRAME_30+32(%r1);					\
	ld	%r29, FRAME_29+32(%r1);					\
	ld	%r28, FRAME_28+32(%r1);					\
	ld	%r27, FRAME_27+32(%r1);					\
	ld	%r26, FRAME_26+32(%r1);					\
	ld	%r25, FRAME_25+32(%r1);					\
	ld	%r24, FRAME_24+32(%r1);					\
	ld	%r23, FRAME_23+32(%r1);					\
	ld	%r22, FRAME_22+32(%r1);					\
	ld	%r21, FRAME_21+32(%r1);					\
	ld	%r20, FRAME_20+32(%r1);					\
	ld	%r19, FRAME_19+32(%r1);					\
	ld	%r18, FRAME_18+32(%r1);					\
	ld	%r17, FRAME_17+32(%r1);					\
	ld	%r16, FRAME_16+32(%r1);					\
	ld	%r15, FRAME_15+32(%r1);					\
	ld	%r14, FRAME_14+32(%r1);					\
	ld	%r13, FRAME_13+32(%r1);					\
	ld	%r12, FRAME_12+32(%r1);					\
	ld	%r11, FRAME_11+32(%r1);					\
	ld	%r10, FRAME_10+32(%r1);					\
	ld	%r9, FRAME_9+32(%r1);					\
	ld	%r8, FRAME_8+32(%r1);					\
	ld	%r7, FRAME_7+32(%r1);					\
	ld	%r6, FRAME_6+32(%r1);					\
	ld	%r5, FRAME_5+32(%r1);					\
	ld	%r4, FRAME_4+32(%r1);					\
	ld	%r3, FRAME_3+32(%r1);					\
	ld	%r2, FRAME_2+32(%r1);					\
	ld	%r0, FRAME_0+32(%r1);					\
	ld	%r1, FRAME_1+32(%r1);					\
/* Can't touch %r1 from here on */					\
	mtsprg3	%r3;			/* save r3 */			\
/* Disable translation, machine check and recoverability: */		\
	mfmsr	%r3;							\
	andi.	%r3, %r3, ~(PSL_DR|PSL_IR|PSL_ME|PSL_RI)@l;		\
	mtmsr	%r3;							\
	isync;								\
/* Decide whether we return to user mode: */				\
	GET_CPUINFO(%r3);						\
	ld	%r3, (savearea+CPUSAVE_SRR1)(%r3);			\
	mtcr	%r3;							\
	bf	17, 1f;			/* branch if PSL_PR is false */	\
/* Restore user SRs */							\
	GET_CPUINFO(%r3);						\
	std	%r27, (savearea+CPUSAVE_R27)(%r3);			\
	std	%r28, (savearea+CPUSAVE_R28)(%r3);			\
	std	%r29, (savearea+CPUSAVE_R29)(%r3);			\
	std	%r30, (savearea+CPUSAVE_R30)(%r3);			\
	std	%r31, (savearea+CPUSAVE_R31)(%r3);			\
	mflr	%r27;			/* preserve LR */		\
	bl	restore_usersrs;	/* uses r28-r31 */		\
	mtlr	%r27;							\
	ld	%r31, (savearea+CPUSAVE_R31)(%r3);			\
	ld	%r30, (savearea+CPUSAVE_R30)(%r3);			\
	ld	%r29, (savearea+CPUSAVE_R29)(%r3);			\
	ld	%r28, (savearea+CPUSAVE_R28)(%r3);			\
	ld	%r27, (savearea+CPUSAVE_R27)(%r3);			\
1:	mfsprg2	%r3;			/* restore cr */		\
	mtcr	%r3;							\
	GET_CPUINFO(%r3);						\
	ld	%r3, (savearea+CPUSAVE_SRR0)(%r3); /* restore srr0 */	\
	mtsrr0	%r3;							\
	GET_CPUINFO(%r3);						\
	ld	%r3, (savearea+CPUSAVE_SRR1)(%r3); /* restore srr1 */	\
	mtsrr1	%r3;							\
	mfsprg3	%r3			/* restore r3 */


	.text

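/*
 * This stub is copied into the exception vectors.  It stashes SP in
 * SPRG1 and LR in SPRG2, then branches via blrl so that the vector
 * address can later be recovered from LR (see generictrap below).
 */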
	.globl trapcode, trapcodeend
trapcode:
	mtsprg1	%r1
	mflr	%r1
	mtsprg2	%r1
	ld	%r1, TRAP_ENTRY(0)
	mtlr	%r1
	li	%r1, 0xe0
	blrl
trapcodeend:

	.globl hvtrapcode, hvtrapcodeend
hvtrapcode:
	mtsprg1	%r1
	mflr	%r1
	mtsprg2	%r1
	ld	%r1, TRAP_HVENTRY(0)
	mtlr	%r1
	li	%r1, 0xe0
	blrl
hvtrapcodeend:

/* System reset might be an exit from power-saving mode. */
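/* SRR1 bits 46:47 encode the wakeup reason and are nonzero on wakeup. */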
	.globl rsttrapcode, rsttrapcodeend
rsttrapcode:
	mtsprg1	%r1
	mfcr	%r1
	mtsprg2	%r1			/* save cr */
	mfsrr1	%r1
	andis.	%r1, %r1, 0x3		/* test srr1 bits 46:47 */
	beq	1f
	/* This is an exit from power-saving mode. */
	ld	%r1, TRAP_RSTENTRY(0)	/* cpu_idle_restore_context */
	mtctr	%r1
	bctr
1:	/* This is something else. */
	mfsprg2	%r1
	mtcr	%r1			/* restore cr */
	mflr	%r1
	mtsprg2	%r1
	ld	%r1, TRAP_ENTRY(0)	/* generictrap */
	mtlr	%r1
	li	%r1, 0xe0
	blrl
rsttrapcodeend:

/*
 * For SLB misses: do special things for the kernel
 *
 * Note: SPRG1 is always safe to overwrite any time the MMU was on, which is
 * the only time this can be called.
 */
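/*
 * The 0xNN comments below track byte offsets within the stub; the
 * whole sequence must fit in the 128 bytes available at the vector.
 */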
	.globl slbtrapcode, slbtrapcodeend
slbtrapcode:
	/* 0x00 */
	mtsprg1	%r1			/* save SP */
	GET_CPUINFO(%r1)
	std	%r2, (CI_SLBSAVE+16)(%r1)	/* save r2 */
	mfcr	%r2
	/* 0x10 */
	std	%r2, (CI_SLBSAVE+104)(%r1)	/* save CR */
	mfsrr1	%r2			/* test kernel mode */
	mtcr	%r2
	bf	17, 1f			/* branch if PSL_PR is false */
	/* 0x20 */
	/* User mode */
	ld	%r2, (CI_SLBSAVE+104)(%r1)
	mtcr	%r2				/* restore CR */
	ld	%r2, (CI_SLBSAVE+16)(%r1)	/* restore r2 */
	mflr	%r1
	/* 0x30 */
	mtsprg2 %r1				/* save LR in SPRG2 */
	ld	%r1, TRAP_ENTRY(0)
	mtlr	%r1
	li	%r1, 0x80		/* How to get the vector from LR */
	/* 0x40 */
	blrl				/* Branch to generictrap */
1:	mflr	%r2			/* Save the old LR in r2 */
	/* Kernel mode */
	ld	%r1, TRAP_SLBENTRY(0)
	mtlr	%r1
	/* 0x50 */
	GET_CPUINFO(%r1)
	blrl					/* Branch to kern_slbtrap */
/* must fit in 128 bytes! */
slbtrapcodeend:

/*
 * On entry:
 * SPRG1: SP
 * r1: pcpu
 * r2: LR
 * LR: branch address in trap region
 */
	.globl kern_slbtrap
kern_slbtrap:
	std	%r2, (CI_SLBSAVE+136)(%r1) /* old LR */
	std	%r3, (CI_SLBSAVE+24)(%r1) /* save R3 */

	/* Check if this needs to be handled as a regular trap (userseg miss) */
	mfdar	%r2
	lis	%r3, SEGMENT_MASK@h
	ori	%r3, %r3, SEGMENT_MASK@l
	andc	%r2, %r2, %r3	/* R2 = segment base address */
	lis	%r3, USER_ADDR@highesta
	ori	%r3, %r3, USER_ADDR@highera
	sldi	%r3, %r3, 32
	oris	%r3, %r3, USER_ADDR@ha
	ori	%r3, %r3, USER_ADDR@l
	cmpd	%r2, %r3	/* Compare fault base to USER_ADDR */
	bne	1f

	/* User seg miss, handle as a regular trap */
	ld	%r2, (CI_SLBSAVE+104)(%r1) /* Restore CR */
	mtcr	%r2
	ld	%r2, (CI_SLBSAVE+16)(%r1) /* Restore R2,R3 */
	ld	%r3, (CI_SLBSAVE+24)(%r1)
	ld	%r1, (CI_SLBSAVE+136)(%r1) /* Save the old LR in r1 */
	mtsprg2 %r1			/* And then in SPRG2 */
	li	%r1, 0x80		/* How to get the vector from LR */
	b	generictrap		/* Retain old LR using b */

1:	/* Real kernel SLB miss */
	std	%r0, (CI_SLBSAVE+0)(%r1) /* free all volatile regs */
	mfsprg1	%r2			/* Old R1 */
	std	%r2, (CI_SLBSAVE+8)(%r1)
	/* R2, R3 already saved */
	std	%r4, (CI_SLBSAVE+32)(%r1)
	std	%r5, (CI_SLBSAVE+40)(%r1)
	std	%r6, (CI_SLBSAVE+48)(%r1)
	std	%r7, (CI_SLBSAVE+56)(%r1)
	std	%r8, (CI_SLBSAVE+64)(%r1)
	std	%r9, (CI_SLBSAVE+72)(%r1)
	std	%r10, (CI_SLBSAVE+80)(%r1)
	std	%r11, (CI_SLBSAVE+88)(%r1)
	std	%r12, (CI_SLBSAVE+96)(%r1)
	/* CR already saved */
	mfxer	%r2			/* save XER */
	std	%r2, (CI_SLBSAVE+112)(%r1)
	mflr	%r2			/* save LR (SP already saved) */
	std	%r2, (CI_SLBSAVE+120)(%r1)
	mfctr	%r2			/* save CTR */
	std	%r2, (CI_SLBSAVE+128)(%r1)

	/* Call handler */
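	/*
	 * Run the handler on the per-CPU SLB spill stack, since the
	 * kernel stack itself may be what took the miss; round the
	 * stack pointer down to a 16-byte boundary first.
	 */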
	addi	%r1, %r1, CI_SLBSTACK-48+1024
	li	%r2, ~15
	and	%r1, %r1, %r2
	GET_TOCBASE(%r2)
	mfdar	%r3
	bl	pmap_spill_kernel_slb
	nop

	/* Save r28-31, restore r4-r12 */
	GET_CPUINFO(%r1)
	ld	%r4, (CI_SLBSAVE+32)(%r1)
	ld	%r5, (CI_SLBSAVE+40)(%r1)
	ld	%r6, (CI_SLBSAVE+48)(%r1)
	ld	%r7, (CI_SLBSAVE+56)(%r1)
	ld	%r8, (CI_SLBSAVE+64)(%r1)
	ld	%r9, (CI_SLBSAVE+72)(%r1)
	ld	%r10, (CI_SLBSAVE+80)(%r1)
	ld	%r11, (CI_SLBSAVE+88)(%r1)
	ld	%r12, (CI_SLBSAVE+96)(%r1)
	std	%r28, (CI_SLBSAVE+64)(%r1)
	std	%r29, (CI_SLBSAVE+72)(%r1)
	std	%r30, (CI_SLBSAVE+80)(%r1)
	std	%r31, (CI_SLBSAVE+88)(%r1)

	/* Restore kernel mapping */
	bl	restore_kernsrs

	/* Restore remaining registers */
	ld	%r28, (CI_SLBSAVE+64)(%r1)
	ld	%r29, (CI_SLBSAVE+72)(%r1)
	ld	%r30, (CI_SLBSAVE+80)(%r1)
	ld	%r31, (CI_SLBSAVE+88)(%r1)

	ld	%r2, (CI_SLBSAVE+104)(%r1)
	mtcr	%r2
	ld	%r2, (CI_SLBSAVE+112)(%r1)
	mtxer	%r2
	ld	%r2, (CI_SLBSAVE+120)(%r1)
	mtlr	%r2
	ld	%r2, (CI_SLBSAVE+128)(%r1)
	mtctr	%r2
	ld	%r2, (CI_SLBSAVE+136)(%r1)
	mtlr	%r2

	/* Restore r0-r3 */
	ld	%r0, (CI_SLBSAVE+0)(%r1)
	ld	%r2, (CI_SLBSAVE+16)(%r1)
	ld	%r3, (CI_SLBSAVE+24)(%r1)
	mfsprg1	%r1

	/* Back to whatever we were doing */
	rfid

/*
 * generichvtrap makes a hypervisor trap look like a normal trap.
 */

	.globl generichvtrap
generichvtrap:
	/* Move HSRR0/HSRR1 to SRR0/SRR1 */
	mtsprg3	%r1
	mfspr	%r1, 314	/* HSRR0 */
	mtsrr0	%r1
	mfspr	%r1, 315	/* HSRR1 */
	mtsrr1	%r1
	mfsprg3	%r1
	/* FALLTHROUGH */

/*
 * generictrap does some standard setup for trap handling to minimize
 * the code that needs to be installed in the actual vectors. It expects
 * the following conditions:
 *
 * R1 - Trap vector = LR & (0xff00 | R1)
 * SPRG1 - Original R1 contents
 * SPRG2 - Original LR
 */
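/*
 * Example: with trapcode copied to vector 0x300, the blrl lies at
 * offset 0x18, so LR-4 = 0x318 and 0x318 & (0xff00 | 0xe0) = 0x300,
 * which is then recorded as the trap type.
 */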

	.globl generictrap
	.type generictrap, @function
generictrap:
	/* Save R1 for computing the exception vector */
	mtsprg3	%r1

	/* Save interesting registers */
	GET_CPUINFO(%r1)
	std	%r27, (CI_TEMPSAVE+CPUSAVE_R27)(%r1)	/* free r27-r31 */
	std	%r28, (CI_TEMPSAVE+CPUSAVE_R28)(%r1)
	std	%r29, (CI_TEMPSAVE+CPUSAVE_R29)(%r1)
	std	%r30, (CI_TEMPSAVE+CPUSAVE_R30)(%r1)
	std	%r31, (CI_TEMPSAVE+CPUSAVE_R31)(%r1)
	mfdar	%r30
	std	%r30, (CI_TEMPSAVE+CPUSAVE_DAR)(%r1)
	mfdsisr	%r30
	std	%r30, (CI_TEMPSAVE+CPUSAVE_DSISR)(%r1)
	mfsprg1	%r1			/* restore SP, in case of branch */
	mfsprg2	%r28			/* save LR */
	mfcr	%r29			/* save CR */

	/* Compute the exception vector from the link register */
	mfsprg3 %r31
	ori	%r31, %r31, 0xff00
	mflr	%r30
	addi	%r30, %r30, -4 /* The branch instruction, not the next */
	and	%r30, %r30, %r31
	mtsprg3	%r30

	/* Test whether we already had PR set */
	mfsrr1	%r31
	mtcr	%r31
	bf	17, k_trap		/* branch if PSL_PR is false */

u_trap:
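	/* Trap from user mode: switch to the top of the process's kernel stack. */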
	GET_CPUINFO(%r1)
	ld	%r1, CI_CURPCB(%r1)
	addi	%r1, %r1, USPACE
	mr	%r27, %r28
	mtsprg2	%r29
	bl	restore_kernsrs		/* enable kernel mapping */
	mfsprg2	%r29
	mr	%r28, %r27

k_trap:
	FRAME_SETUP(CI_TEMPSAVE)
	GET_TOCBASE(%r2)
trapagain:
	addi	%r3, %r1, 32
	bl	trap

	.globl	trapexit
trapexit:
/* Disable interrupts: */
	mfmsr	%r3
	andi.	%r3, %r3, ~PSL_EE@l
	mtmsr	%r3
	isync
/* Test AST pending: */
	ld	%r5, FRAME_SRR1+32(%r1)
	mtcr	%r5
	bf	17, 1f			/* branch if PSL_PR is false */

	GET_CPUINFO(%r3)		/* get per-CPU pointer */
	ld	%r4, CI_CURPROC(%r3)
	lwz	%r4, P_MD_ASTPENDING(%r4)
	cmpwi	%r4, 0
	beq	1f
	li	%r6, EXC_AST
	std	%r6, FRAME_EXC+32(%r1)
	b	trapagain
1:
	FRAME_LEAVE(CI_TEMPSAVE)
	rfid
