/* exception.S revision 89047 */
/*-
 * Copyright (c) 1997 Berkeley Software Design, Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Berkeley Software Design Inc's name may not be used to endorse or
 *    promote products derived from this software without specific prior
 *    written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BERKELEY SOFTWARE DESIGN INC ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL BERKELEY SOFTWARE DESIGN INC BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 *	from BSDI: locore.s,v 1.36.2.15 1999/08/23 22:34:41 cp Exp
 */
/*-
 * Copyright (c) 2001 Jake Burkholder.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/sparc64/sparc64/exception.S 89047 2002-01-08 05:17:28Z jake $
 */

#include "opt_ddb.h"

#include <machine/asi.h>
#include <machine/asmacros.h>
#include <machine/ktr.h>
#include <machine/pstate.h>
#include <machine/trap.h>
#include <machine/tstate.h>
#include <machine/wstate.h>

#include "assym.s"

	.register %g2,#ignore
	.register %g3,#ignore
	.register %g6,#ignore
	.register %g7,#ignore

/*
 * Atomically set the reference bit in a tte.
 *
 * r1 is advanced to the tte's data field; the casxa loop retries until the
 * compare-and-swap succeeds, leaving the updated data word in r2.
 */
#define	TTE_SET_BIT(r1, r2, r3, bit) \
	add	r1, TTE_DATA, r1 ; \
	ldx	[r1], r2 ; \
9:	or	r2, bit, r3 ; \
	casxa	[r1] ASI_N, r2, r3 ; \
	cmp	r2, r3 ; \
	bne,pn	%xcc, 9b ; \
	 mov	r3, r2

#define	TTE_SET_REF(r1, r2, r3)		TTE_SET_BIT(r1, r2, r3, TD_REF)
#define	TTE_SET_W(r1, r2, r3)		TTE_SET_BIT(r1, r2, r3, TD_W)

/*
 * Macros for spilling and filling live windows.
 *
 * NOTE: These macros use exactly 16 instructions, and it is assumed that the
 * handler will not use more than 24 instructions total, to leave room for
 * resume vectors which occupy the last 8 instructions.
 */

/* Store all 16 local and in registers of the trapped window. */
#define	SPILL(storer, base, size, asi) \
	storer	%l0, [base + (0 * size)] asi ; \
	storer	%l1, [base + (1 * size)] asi ; \
	storer	%l2, [base + (2 * size)] asi ; \
	storer	%l3, [base + (3 * size)] asi ; \
	storer	%l4, [base + (4 * size)] asi ; \
	storer	%l5, [base + (5 * size)] asi ; \
	storer	%l6, [base + (6 * size)] asi ; \
	storer	%l7, [base + (7 * size)] asi ; \
	storer	%i0, [base + (8 * size)] asi ; \
	storer	%i1, [base + (9 * size)] asi ; \
	storer	%i2, [base + (10 * size)] asi ; \
	storer	%i3, [base + (11 * size)] asi ; \
	storer	%i4, [base + (12 * size)] asi ; \
	storer	%i5, [base + (13 * size)] asi ; \
	storer	%i6, [base + (14 * size)] asi ; \
	storer	%i7, [base + (15 * size)] asi

/* Load all 16 local and in registers of the window being filled. */
#define	FILL(loader, base, size, asi) \
	loader	[base + (0 * size)] asi, %l0 ; \
	loader	[base + (1 * size)] asi, %l1 ; \
	loader	[base + (2 * size)] asi, %l2 ; \
	loader	[base + (3 * size)] asi, %l3 ; \
	loader	[base + (4 * size)] asi, %l4 ; \
	loader	[base + (5 * size)] asi, %l5 ; \
	loader	[base + (6 * size)] asi, %l6 ; \
	loader	[base + (7 * size)] asi, %l7 ; \
	loader	[base + (8 * size)] asi, %i0 ; \
	loader	[base + (9 * size)] asi, %i1 ; \
	loader	[base + (10 * size)] asi, %i2 ; \
	loader	[base + (11 * size)] asi, %i3 ; \
	loader	[base + (12 * size)] asi, %i4 ; \
	loader	[base + (13 * size)] asi, %i5 ; \
	loader	[base + (14 * size)] asi, %i6 ; \
	loader	[base + (15 * size)] asi, %i7

/*
 * No-op register move.  NOTE(review): presumably a workaround for an
 * UltraSPARC erratum (#50) affecting a just-read trap register — confirm
 * against the processor errata list.
 */
#define	ERRATUM50(reg)	mov reg, reg

/* Bytes of kernel stack headroom below which KSTACK_CHECK faults. */
#define	KSTACK_SLOP	1024

/*
 * Sanity-check the kernel stack pointer: branch to tl1_kstack_fault if %sp
 * is misaligned, below the thread's kstack + KSTACK_SLOP, or beyond the top
 * of the kstack.  Uses two slots of scratch space at ASP_REG; each taken
 * branch's annulled delay slot rebalances ASP_REG before the jump.
 */
#define	KSTACK_CHECK \
	dec	16, ASP_REG ; \
	stx	%g1, [ASP_REG + 0] ; \
	stx	%g2, [ASP_REG + 8] ; \
	add	%sp, SPOFF, %g1 ; \
	andcc	%g1, (1 << PTR_SHIFT) - 1, %g0 ; \
	bnz,a	%xcc, tl1_kstack_fault ; \
	 inc	16, ASP_REG ; \
	ldx	[PCPU(CURTHREAD)], %g2 ; \
	ldx	[%g2 + TD_KSTACK], %g2 ; \
	add	%g2, KSTACK_SLOP, %g2 ; \
	subcc	%g1, %g2, %g1 ; \
	ble,a	%xcc, tl1_kstack_fault ; \
	 inc	16, ASP_REG ; \
	set	KSTACK_PAGES * PAGE_SIZE, %g2 ; \
	cmp	%g1, %g2 ; \
	bgt,a	%xcc, tl1_kstack_fault ; \
	 inc	16, ASP_REG ; \
	ldx	[ASP_REG + 8], %g2 ; \
	ldx	[ASP_REG + 0], %g1 ; \
	inc	16, ASP_REG

159
/*
 * Fatal kernel stack fault detected by KSTACK_CHECK.  Drops back to %tl 2
 * if entered at %tl 3, resets the window state, switches to the panic
 * stack, and calls tl1_trap with T_KSTACK_FAULT.  A trap level above 3
 * is unrecoverable (sir).
 */
ENTRY(tl1_kstack_fault)
	rdpr	%tl, %g1
	cmp	%g1, 3
	beq	%xcc, 1f
	 nop
	blt	%xcc, 2f
	 nop
	sir

1:
#if KTR_COMPILE & KTR_TRAP
	CATR(KTR_TRAP, "tl1_kstack_fault: tl=%#lx tpc=%#lx tnpc=%#lx"
	    , %g1, %g2, %g3, 7, 8, 9)
	rdpr	%tl, %g2
	stx	%g2, [%g1 + KTR_PARM1]
	rdpr	%tpc, %g2
	/* Fix: tpc/tnpc previously both stored to KTR_PARM1, clobbering tl. */
	stx	%g2, [%g1 + KTR_PARM2]
	rdpr	%tnpc, %g2
	stx	%g2, [%g1 + KTR_PARM3]
9:
#endif
	wrpr	%g0, 2, %tl

2:
#if KTR_COMPILE & KTR_TRAP
	CATR(KTR_TRAP,
	    "tl1_kstack_fault: sp=%#lx ks=%#lx cr=%#lx cs=%#lx ow=%#lx ws=%#lx"
	    , %g1, %g2, %g3, 7, 8, 9)
	add	%sp, SPOFF, %g2
	stx	%g2, [%g1 + KTR_PARM1]
	ldx	[PCPU(CURTHREAD)], %g2
	ldx	[%g2 + TD_KSTACK], %g2
	stx	%g2, [%g1 + KTR_PARM2]
	rdpr	%canrestore, %g2
	stx	%g2, [%g1 + KTR_PARM3]
	rdpr	%cansave, %g2
	stx	%g2, [%g1 + KTR_PARM4]
	rdpr	%otherwin, %g2
	stx	%g2, [%g1 + KTR_PARM5]
	rdpr	%wstate, %g2
	stx	%g2, [%g1 + KTR_PARM6]
9:
#endif

	/* Discard all register windows and return to kernel window state. */
	wrpr	%g0, 0, %canrestore
	wrpr	%g0, 6, %cansave
	wrpr	%g0, 0, %otherwin
	wrpr	%g0, WSTATE_KERNEL, %wstate

	/* Run the trap handler on the panic stack; the kstack is suspect. */
	SET(panic_stack + PANIC_STACK_PAGES * PAGE_SIZE, %g2, %g1)
	sub	%g1, SPOFF + CCFSZ, %sp
	clr	%fp

	rdpr	%pil, %o1
	b	%xcc, tl1_trap
	 mov	T_KSTACK_FAULT | T_KERNEL, %o0
END(tl1_kstack_fault)
217
/*
 * Magic to resume from a spill or fill trap.  If we get an alignment or an
 * mmu fault during a spill or a fill, this macro will detect the fault and
 * resume at a set instruction offset in the trap handler.
 *
 * To check if the previous trap was a spill/fill we convert the trapped pc
 * to a trap type and verify that it is in the range of spill/fill vectors.
 * The spill/fill vectors are types 0x80-0xff and 0x280-0x2ff, masking off the
 * tl bit allows us to detect both ranges with one test.
 *
 * This is:
 *	0x80 <= (((%tpc - %tba) >> 5) & ~0x200) < 0x100
 *
 * To calculate the new pc we take advantage of the xor feature of wrpr.
 * Forcing all the low bits of the trapped pc on we can produce any offset
 * into the spill/fill vector.  The size of a spill/fill trap vector is 0x80.
 *
 *	0x7f ^ 0x1f == 0x60
 *	0x1f == (0x80 - 0x60) - 1
 *
 * Which are the offset and xor value used to resume from alignment faults.
 */

/*
 * Determine if we have trapped inside of a spill/fill vector, and if so resume
 * at a fixed instruction offset in the trap vector.  Must be called on
 * alternate globals.  If the trapped pc is not in a spill/fill vector this
 * falls through to the code following the macro with %g1/%g2 restored.
 */
#define	RESUME_SPILLFILL_MAGIC(stxa_g0_sfsr, xor) \
	dec	16, ASP_REG ; \
	stx	%g1, [ASP_REG + 0] ; \
	stx	%g2, [ASP_REG + 8] ; \
	rdpr	%tpc, %g1 ; \
	ERRATUM50(%g1) ; \
	rdpr	%tba, %g2 ; \
	sub	%g1, %g2, %g2 ; \
	srlx	%g2, 5, %g2 ; \
	andn	%g2, 0x200, %g2 ; \
	cmp	%g2, 0x80 ; \
	blu,pt	%xcc, 9f ; \
	 cmp	%g2, 0x100 ; \
	bgeu,pt	%xcc, 9f ; \
	 or	%g1, 0x7f, %g1 ; \
	wrpr	%g1, xor, %tnpc ; \
	stxa_g0_sfsr ; \
	ldx	[ASP_REG + 8], %g2 ; \
	ldx	[ASP_REG + 0], %g1 ; \
	inc	16, ASP_REG ; \
	done ; \
9:	ldx	[ASP_REG + 8], %g2 ; \
	ldx	[ASP_REG + 0], %g1 ; \
	inc	16, ASP_REG

/*
 * For certain faults we need to clear the sfsr mmu register before returning.
 */
#define	RSF_CLR_SFSR \
	wr	%g0, ASI_DMMU, %asi ; \
	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi

#define	RSF_XOR(off)	((0x80 - off) - 1)

/*
 * Instruction offsets in spill and fill trap handlers for handling certain
 * nested traps, and corresponding xor constants for wrpr.
 */
#define	RSF_OFF_ALIGN	0x60
#define	RSF_OFF_MMU	0x70

#define	RESUME_SPILLFILL_ALIGN \
	RESUME_SPILLFILL_MAGIC(RSF_CLR_SFSR, RSF_XOR(RSF_OFF_ALIGN))
#define	RESUME_SPILLFILL_MMU \
	RESUME_SPILLFILL_MAGIC(EMPTY, RSF_XOR(RSF_OFF_MMU))
#define	RESUME_SPILLFILL_MMU_CLR_SFSR \
	RESUME_SPILLFILL_MAGIC(RSF_CLR_SFSR, RSF_XOR(RSF_OFF_MMU))

/*
 * Constant to add to %tnpc when taking a fill trap just before returning to
 * user mode.
 */
#define	RSF_FILL_INC	tl0_ret_fill_end - tl0_ret_fill

/*
 * Retry a spill or fill with a different wstate due to an alignment fault.
 * We may just be using the wrong stack offset.
 */
#define	RSF_ALIGN_RETRY(ws) \
	wrpr	%g0, (ws), %wstate ; \
	retry ; \
	.align	16

/*
 * Generate a T_SPILL or T_FILL trap if the window operation fails.
 */
#define	RSF_TRAP(type) \
	b	%xcc, tl0_sftrap ; \
	 mov	type, %g2 ; \
	.align	16

/*
 * Game over if the window operation fails.
 */
#define	RSF_FATAL(type) \
	b	%xcc, rsf_fatal ; \
	 mov	type, %g2 ; \
	.align	16

/*
 * Magic to resume from a failed fill a few instructions after the
 * corresponding restore.  This is used on return from the kernel to usermode.
 */
#define	RSF_FILL_MAGIC \
	rdpr	%tnpc, %g1 ; \
	add	%g1, RSF_FILL_INC, %g1 ; \
	wrpr	%g1, 0, %tnpc ; \
	done ; \
	.align	16

/*
 * Spill to the pcb if a spill to the user stack in kernel mode fails.
 */
#define	RSF_SPILL_TOPCB \
	b,a	%xcc, tl1_spill_topcb ; \
	 nop ; \
	.align	16

343
/*
 * Unrecoverable window spill/fill failure (reached via RSF_FATAL): trace the
 * trap type, verify the kernel stack, and reset the processor.
 */
ENTRY(rsf_fatal)
#if KTR_COMPILE & KTR_TRAP
	CATR(KTR_TRAP, "rsf_fatal: bad window trap tt=%#lx type=%#lx"
	    , %g1, %g3, %g4, 7, 8, 9)
	rdpr	%tt, %g3
	stx	%g3, [%g1 + KTR_PARM1]
	stx	%g2, [%g1 + KTR_PARM2]
9:
#endif

	KSTACK_CHECK

	sir
END(rsf_fatal)

/* Interrupt name and count arrays, NIV entries each; e* marks the end. */
	.comm	intrnames, NIV * 8
	.comm	eintrnames, 0

	.comm	intrcnt, NIV * 8
	.comm	eintrcnt, 0

364
/*
 * Trap table and associated macros
 *
 * Due to its size a trap table is an inherently hard thing to represent in
 * code in a clean way.  There are approximately 1024 vectors, of 8 or 32
 * instructions each, many of which are identical.  The way that this is
 * layed out is the instructions (8 or 32) for the actual trap vector appear
 * as an AS macro.  In general this code branches to tl0_trap or tl1_trap,
 * but if not supporting code can be placed just after the definition of the
 * macro.  The macros are then instantiated in a different section (.trap),
 * which is setup to be placed by the linker at the beginning of .text, and the
 * code around the macros is moved to the end of trap table.  In this way the
 * code that must be sequential in memory can be split up, and located near
 * its supporting code so that it is easier to follow.
 */

	/*
	 * Clean window traps occur when %cleanwin is zero to ensure that data
	 * is not leaked between address spaces in registers.
	 */
	.macro	clean_window
	clr	%o0
	clr	%o1
	clr	%o2
	clr	%o3
	clr	%o4
	clr	%o5
	clr	%o6
	clr	%o7
	clr	%l0
	clr	%l1
	clr	%l2
	clr	%l3
	clr	%l4
	clr	%l5
	clr	%l6
	rdpr	%cleanwin, %l7
	inc	%l7
	wrpr	%l7, 0, %cleanwin
	clr	%l7
	retry
	.align	128
	.endm

	/*
	 * Stack fixups for entry from user mode.  We are still running on the
	 * user stack, and with its live registers, so we must save soon.  We
	 * are on alternate globals so we do have some registers.  Set the
	 * transitional window state, and do the save.  If this traps we
	 * attempt to spill a window to the user stack.  If this fails,
	 * we spill the window to the pcb and continue.  Spilling to the pcb
	 * must not fail.
	 *
	 * NOTE: Must be called with alternate globals and clobbers %g1.
	 */

	.macro	tl0_split
	rdpr	%wstate, %g1
	wrpr	%g1, WSTATE_TRANSITION, %wstate
	save
	.endm

	/* Split off the user's windows and branch to the common trap code. */
	.macro	tl0_setup	type
	tl0_split
	b	%xcc, tl0_trap
	 mov	\type, %o0
	.endm

	/*
	 * Generic trap type.  Call trap() with the specified type.
	 */
	.macro	tl0_gen		type
	tl0_setup \type
	.align	32
	.endm

	/*
	 * This is used to suck up the massive swaths of reserved trap types.
	 * Generates count "reserved" trap vectors.
	 */
	.macro	tl0_reserved	count
	.rept	\count
	tl0_gen	T_RESERVED
	.endr
	.endm

450
	/*
	 * Enable the FPU (FPRS_FEF) and reload all four 16-register floating
	 * point blocks from the pcb, then return from the trap.
	 */
	.macro	tl0_fp_restore
	wr	%g0, FPRS_FEF, %fprs
	wr	%g0, ASI_BLK_S, %asi
	ldda	[PCB_REG + PCB_FPSTATE + FP_FB0] %asi, %f0
	ldda	[PCB_REG + PCB_FPSTATE + FP_FB1] %asi, %f16
	ldda	[PCB_REG + PCB_FPSTATE + FP_FB2] %asi, %f32
	ldda	[PCB_REG + PCB_FPSTATE + FP_FB3] %asi, %f48
	membar	#Sync
	done
	.align	32
	.endm

	/*
	 * Instruction access exception: capture %tpc and the immu sfsr,
	 * clear the sfsr, and hand off to tl0_sfsr_trap.
	 */
	.macro	tl0_insn_excptn
	wr	%g0, ASI_IMMU, %asi
	rdpr	%tpc, %g3
	ldxa	[%g0 + AA_IMMU_SFSR] %asi, %g4
	stxa	%g0, [%g0 + AA_IMMU_SFSR] %asi
	membar	#Sync
	b	%xcc, tl0_sfsr_trap
	 mov	T_INSTRUCTION_EXCEPTION, %g2
	.align	32
	.endm

	/*
	 * Data access exception: capture the dmmu sfar and sfsr, clear the
	 * sfsr, and hand off to tl0_sfsr_trap.
	 */
	.macro	tl0_data_excptn
	wr	%g0, ASI_DMMU, %asi
	ldxa	[%g0 + AA_DMMU_SFAR] %asi, %g3
	ldxa	[%g0 + AA_DMMU_SFSR] %asi, %g4
	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi
	membar	#Sync
	b	%xcc, tl0_sfsr_trap
	 mov	T_DATA_EXCEPTION, %g2
	.align	32
	.endm

	/*
	 * Memory address alignment fault: same sfar/sfsr capture as
	 * tl0_data_excptn but with a different trap type.
	 */
	.macro	tl0_align
	wr	%g0, ASI_DMMU, %asi
	ldxa	[%g0 + AA_DMMU_SFAR] %asi, %g3
	ldxa	[%g0 + AA_DMMU_SFSR] %asi, %g4
	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi
	membar	#Sync
	b	%xcc, tl0_sfsr_trap
	 mov	T_MEM_ADDRESS_NOT_ALIGNED, %g2
	.align	32
	.endm

495
/*
 * Common tail for tl0 faults that carry sfar/sfsr state: %g2 = trap type,
 * %g3 = sfar (or tpc), %g4 = sfsr.  Splits the windows and calls tl0_trap.
 */
ENTRY(tl0_sfsr_trap)
	tl0_split
	mov	%g3, %o4
	mov	%g4, %o5
	b	%xcc, tl0_trap
	 mov	%g2, %o0
END(tl0_sfsr_trap)
503
	/*
	 * Interrupt vector at tl0: raise %pil to the interrupt's level,
	 * write the level's mask to %asr21 (V9 CLEAR_SOFTINT), then split
	 * the windows and call the common interrupt code with the level.
	 */
	.macro	tl0_intr level, mask
	wrpr	%g0, \level, %pil
	set	\mask, %g1
	wr	%g1, 0, %asr21
	tl0_split
	b	%xcc, tl0_intr
	 mov	\level, %o2
	.align	32
	.endm

/* Instantiate a tl0 or tl1 interrupt vector for the given level. */
#define	INTR(level, traplvl)						\
	tl ## traplvl ## _intr	level, 1 << level

/* The tick interrupt uses PIL_TICK but clears only softint bit 0. */
#define	TICK(traplvl) \
	tl ## traplvl ## _intr	PIL_TICK, 1

/* Emit the 15 interrupt-level vectors (level 14 is the tick interrupt). */
#define	INTR_LEVEL(tl)							\
	INTR(1, tl) ;							\
	INTR(2, tl) ;							\
	INTR(3, tl) ;							\
	INTR(4, tl) ;							\
	INTR(5, tl) ;							\
	INTR(6, tl) ;							\
	INTR(7, tl) ;							\
	INTR(8, tl) ;							\
	INTR(9, tl) ;							\
	INTR(10, tl) ;							\
	INTR(11, tl) ;							\
	INTR(12, tl) ;							\
	INTR(13, tl) ;							\
	TICK(tl) ;							\
	INTR(15, tl) ;

	.macro	tl0_intr_level
	INTR_LEVEL(0)
	.endm

	/* Interrupt vector trap: queue the incoming vector packet. */
	.macro	tl0_intr_vector
	b,a	%xcc, intr_enqueue
	.align	32
	.endm

545
	/*
	 * ITLB miss from user mode: inline lookup in the primary user tsb.
	 * On a match with TD_EXEC set, load the tlb and retry; otherwise
	 * fall through to C code via tl0_immu_miss_trap.
	 */
	.macro	tl0_immu_miss
	/*
	 * Force kernel store order.
	 */
	wrpr	%g0, PSTATE_MMU, %pstate

	/*
	 * Extract the 8KB pointer.
	 */
	ldxa	[%g0] ASI_IMMU_TSB_8KB_PTR_REG, %g6
	srax	%g6, TTE_SHIFT, %g6

	/*
	 * Compute the tte address in the primary user tsb.
	 */
	and	%g6, (1 << TSB_BUCKET_ADDRESS_BITS) - 1, %g1
	sllx	%g1, TSB_BUCKET_SHIFT + TTE_SHIFT, %g1
	add	%g1, TSB_REG, %g1

	/*
	 * Compute low bits of faulting va to check inside bucket loop.
	 * TD_EXEC is or'd in so the executable bit is checked along with
	 * the va bits by the xor/andcc below.
	 */
	and	%g6, TD_VA_LOW_MASK >> TD_VA_LOW_SHIFT, %g2
	sllx	%g2, TD_VA_LOW_SHIFT, %g2
	or	%g2, TD_EXEC, %g2

	/*
	 * Load the tte tag target.
	 */
	ldxa	[%g0] ASI_IMMU_TAG_TARGET_REG, %g6

	/*
	 * Load mask for tte data check.
	 */
	mov	TD_VA_LOW_MASK >> TD_VA_LOW_SHIFT, %g3
	sllx	%g3, TD_VA_LOW_SHIFT, %g3
	or	%g3, TD_EXEC, %g3

	/*
	 * Loop over the ttes in this bucket
	 */

	/*
	 * Load the tte (tag into %g4, data into %g5).
	 */
1:	ldda	[%g1] ASI_NUCLEUS_QUAD_LDD, %g4 /*, %g5 */

	/*
	 * Compare the tag.
	 */
	cmp	%g4, %g6
	bne,pn	%xcc, 2f
	 EMPTY

	/*
	 * Compare the data: invalid tte (clear valid bit => %g5 >= 0) or
	 * mismatched va-low/exec bits sends us to the next entry.
	 */
	 xor	%g2, %g5, %g4
	brgez,pn %g5, 2f
	 andcc	%g3, %g4, %g0
	bnz,pn	%xcc, 2f
	 EMPTY

	/*
	 * We matched a tte, load the tlb.
	 */

	/*
	 * Set the reference bit, if it's currently clear.
	 */
	 andcc	%g5, TD_REF, %g0
	bz,a,pn	%xcc, tl0_immu_miss_set_ref
	 nop

	/*
	 * Load the tte data into the tlb and retry the instruction.
	 */
	stxa	%g5, [%g0] ASI_ITLB_DATA_IN_REG
	retry

	/*
	 * Check the low bits to see if we've finished the bucket.
	 */
2:	add	%g1, 1 << TTE_SHIFT, %g1
	andcc	%g1, (1 << (TSB_BUCKET_SHIFT + TTE_SHIFT)) - 1, %g0
	bnz,a,pt %xcc, 1b
	 nop
	b,a	%xcc, tl0_immu_miss_trap
	.align	128
	.endm

636
/*
 * Out-of-line slow path for tl0_immu_miss: atomically set TD_REF in the
 * matched tte (%g1 points at it), then load the tlb and retry unless the
 * tte was invalidated under us.
 */
ENTRY(tl0_immu_miss_set_ref)
	/*
	 * Set the reference bit.
	 */
	TTE_SET_REF(%g1, %g2, %g3)

	/*
	 * May have become invalid, in which case start over.
	 */
	brgez,pn %g2, 2f
	 or	%g2, TD_REF, %g2

	/*
	 * Load the tte data into the tlb and retry the instruction.
	 */
	stxa	%g2, [%g0] ASI_ITLB_DATA_IN_REG
2:	retry
END(tl0_immu_miss_set_ref)
655
/*
 * TSB miss on instruction fetch: read the faulting va from the immu tag
 * access register and call the common trap code with T_INSTRUCTION_MISS.
 */
ENTRY(tl0_immu_miss_trap)
	/*
	 * Switch to alternate globals.
	 */
	wrpr	%g0, PSTATE_ALT, %pstate

	/*
	 * Load the tar, sfar and sfsr aren't valid.
	 * Fix: use the IMMU tag access offset to match ASI_IMMU (the DMMU
	 * offset happened to work only because the two registers share a VA;
	 * the tl1 handler already uses AA_IMMU_TAR).
	 */
	wr	%g0, ASI_IMMU, %asi
	ldxa	[%g0 + AA_IMMU_TAR] %asi, %g3

	/*
	 * Save the mmu registers on the stack, and call common trap code.
	 */
	tl0_split
	mov	%g3, %o3
	b	%xcc, tl0_trap
	 mov	T_INSTRUCTION_MISS, %o0
END(tl0_immu_miss_trap)
676
	/*
	 * Inline user DTLB miss lookup in the primary user tsb.  On a match,
	 * loads the tlb and retries; on no match, falls through to the code
	 * following the macro instantiation.  Same structure as the
	 * tl0_immu_miss lookup but without the TD_EXEC requirement.
	 */
	.macro	dmmu_miss_user
	/*
	 * Extract the 8KB pointer and convert to an index.
	 */
	ldxa	[%g0] ASI_DMMU_TSB_8KB_PTR_REG, %g6
	srax	%g6, TTE_SHIFT, %g6

	/*
	 * Compute the tte bucket address.
	 */
	and	%g6, (1 << TSB_BUCKET_ADDRESS_BITS) - 1, %g1
	sllx	%g1, TSB_BUCKET_SHIFT + TTE_SHIFT, %g1
	add	%g1, TSB_REG, %g1

	/*
	 * Compute low bits of faulting va to check inside bucket loop.
	 */
	and	%g6, TD_VA_LOW_MASK >> TD_VA_LOW_SHIFT, %g2
	sllx	%g2, TD_VA_LOW_SHIFT, %g2

	/*
	 * Preload the tte tag target.
	 */
	ldxa	[%g0] ASI_DMMU_TAG_TARGET_REG, %g6

	/*
	 * Load mask for tte data check.
	 */
	mov	TD_VA_LOW_MASK >> TD_VA_LOW_SHIFT, %g3
	sllx	%g3, TD_VA_LOW_SHIFT, %g3

	/*
	 * Loop over the ttes in this bucket
	 */

	/*
	 * Load the tte (tag into %g4, data into %g5).
	 */
1:	ldda	[%g1] ASI_NUCLEUS_QUAD_LDD, %g4 /*, %g5 */

	/*
	 * Compare the tag.
	 */
	cmp	%g4, %g6
	bne,pn	%xcc, 2f
	 EMPTY

	/*
	 * Compare the data.
	 */
	 xor	%g2, %g5, %g4
	brgez,pn %g5, 2f
	 andcc	%g3, %g4, %g0
	bnz,pn	%xcc, 2f
	 EMPTY

	/*
	 * We matched a tte, load the tlb.
	 */

	/*
	 * Set the reference bit, if it's currently clear.
	 */
	 andcc	%g5, TD_REF, %g0
	bz,a,pn	%xcc, dmmu_miss_user_set_ref
	 nop

	/*
	 * Load the tte data into the tlb and retry the instruction.
	 */
	stxa	%g5, [%g0] ASI_DTLB_DATA_IN_REG
	retry

	/*
	 * Check the low bits to see if we've finished the bucket.
	 */
2:	add	%g1, 1 << TTE_SHIFT, %g1
	andcc	%g1, (1 << (TSB_BUCKET_SHIFT + TTE_SHIFT)) - 1, %g0
	bnz,a,pt %xcc, 1b
	 nop
	.endm

/*
 * Out-of-line slow path for dmmu_miss_user: atomically set TD_REF in the
 * matched tte, then load the tlb and retry unless the tte was invalidated.
 */
ENTRY(dmmu_miss_user_set_ref)
	/*
	 * Set the reference bit.
	 */
	TTE_SET_REF(%g1, %g2, %g3)

	/*
	 * May have become invalid, in which case start over.
	 */
	brgez,pn %g2, 2f
	 or	%g2, TD_REF, %g2

	/*
	 * Load the tte data into the tlb and retry the instruction.
	 */
	stxa	%g2, [%g0] ASI_DTLB_DATA_IN_REG
2:	retry
END(dmmu_miss_user_set_ref)
777
	/*
	 * DTLB miss from user mode: fast inline tsb lookup, falling back to
	 * the C path on a miss.
	 */
	.macro	tl0_dmmu_miss
	/*
	 * Force kernel store order.
	 */
	wrpr	%g0, PSTATE_MMU, %pstate

	/*
	 * Try a fast inline lookup of the primary tsb.
	 */
	dmmu_miss_user

	/*
	 * Not in primary tsb, call c code.  Nothing else fits inline.
	 */
	b,a	tl0_dmmu_miss_trap
	.align	128
	.endm

/*
 * TSB miss on data access: read the faulting va from the dmmu tag access
 * register and call the common trap code with T_DATA_MISS.
 */
ENTRY(tl0_dmmu_miss_trap)
	/*
	 * Switch to alternate globals.
	 */
	wrpr	%g0, PSTATE_ALT, %pstate

	/*
	 * Load the tar, sfar and sfsr aren't valid.
	 */
	wr	%g0, ASI_DMMU, %asi
	ldxa	[%g0 + AA_DMMU_TAR] %asi, %g3

	/*
	 * Save the mmu registers on the stack and call common trap code.
	 */
	tl0_split
	mov	%g3, %o3
	b	%xcc, tl0_trap
	 mov	T_DATA_MISS, %o0
END(tl0_dmmu_miss_trap)
816
817	.macro	dmmu_prot_user
818	/*
819	 * Extract the 8KB pointer and convert to an index.
820	 */
821	ldxa	[%g0] ASI_DMMU_TSB_8KB_PTR_REG, %g6
822	srax	%g6, TTE_SHIFT, %g6
823
824	/*
825	 * Compute the tte bucket address.
826	 */
827	and	%g6, (1 << TSB_BUCKET_ADDRESS_BITS) - 1, %g1
828	sllx	%g1, TSB_BUCKET_SHIFT + TTE_SHIFT, %g1
829	add	%g1, TSB_REG, %g1
830
831	/*
832	 * Compute low bits of faulting va to check inside bucket loop.
833	 */
834	and	%g6, TD_VA_LOW_MASK >> TD_VA_LOW_SHIFT, %g2
835	sllx	%g2, TD_VA_LOW_SHIFT, %g2
836	or	%g2, TD_SW, %g2
837
838	/*
839	 * Preload the tte tag target.
840	 */
841	ldxa	[%g0] ASI_DMMU_TAG_TARGET_REG, %g6
842
843	/*
844	 * Load mask for tte data check.
845	 */
846	mov	TD_VA_LOW_MASK >> TD_VA_LOW_SHIFT, %g3
847	sllx	%g3, TD_VA_LOW_SHIFT, %g3
848	or	%g3, TD_SW, %g3
849
850	/*
851	 * Loop over the ttes in this bucket
852	 */
853
854	/*
855	 * Load the tte.
856	 */
8571:	ldda	[%g1] ASI_NUCLEUS_QUAD_LDD, %g4 /*, %g5 */
858
859	/*
860	 * Compare the tag.
861	 */
862	cmp	%g4, %g6
863	bne,pn	%xcc, 2f
864	 EMPTY
865
866	/*
867	 * Compare the data.
868	 */
869	 xor	%g2, %g5, %g4
870	brgez,pn %g5, 2f
871	 andcc	%g3, %g4, %g0
872	bnz,a,pn %xcc, 2f
873	 nop
874
875	b,a	%xcc, dmmu_prot_set_w
876	 nop
877
878	/*
879	 * Check the low bits to see if we've finished the bucket.
880	 */
8812:	add	%g1, 1 << TTE_SHIFT, %g1
882	andcc	%g1, (1 << (TSB_BUCKET_SHIFT + TTE_SHIFT)) - 1, %g0
883	bnz,a,pn %xcc, 1b
884	 nop
885	.endm
886
	/*
	 * Write-protection fault from user mode: fast inline tsb lookup,
	 * falling back to the C path on a miss.
	 */
	.macro	tl0_dmmu_prot
	/*
	 * Force kernel store order.
	 */
	wrpr	%g0, PSTATE_MMU, %pstate

	/*
	 * Try a fast inline lookup of the tsb.
	 */
	dmmu_prot_user

	/*
	 * Not in tsb.  Call c code.
	 */
	b,a	%xcc, tl0_dmmu_prot_trap
	 nop
	.align	128
	.endm

/*
 * Slow path shared by the protection-fault lookups: atomically set TD_W in
 * the matched tte, demap the stale read-only tlb entry for the faulting
 * page, then load the updated tte and retry (unless it became invalid).
 */
ENTRY(dmmu_prot_set_w)
	/*
	 * Set the hardware write bit in the tte.
	 */
	TTE_SET_W(%g1, %g2, %g3)

	/*
	 * Delete the old TLB entry.
	 */
	wr	%g0, ASI_DMMU, %asi
	ldxa	[%g0 + AA_DMMU_TAR] %asi, %g1
	srlx	%g1, PAGE_SHIFT, %g1
	sllx	%g1, PAGE_SHIFT, %g1
	stxa	%g0, [%g1] ASI_DMMU_DEMAP
	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi

	brgez,pn %g2, 1f
	 or	%g2, TD_W, %g2

	/*
	 * Load the tte data into the tlb and retry the instruction.
	 */
	stxa	%g2, [%g0] ASI_DTLB_DATA_IN_REG
1:	retry
END(dmmu_prot_set_w)
931
/*
 * Protection fault not resolvable inline: capture tar/sfar/sfsr, clear the
 * sfsr, and call the common trap code with T_DATA_PROTECTION.
 */
ENTRY(tl0_dmmu_prot_trap)
	/*
	 * Switch to alternate globals.
	 */
	wrpr	%g0, PSTATE_ALT, %pstate

	/*
	 * Load the tar, sfar and sfsr.
	 */
	wr	%g0, ASI_DMMU, %asi
	ldxa	[%g0 + AA_DMMU_TAR] %asi, %g2
	ldxa	[%g0 + AA_DMMU_SFAR] %asi, %g3
	ldxa	[%g0 + AA_DMMU_SFSR] %asi, %g4
	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi
	membar	#Sync

	/*
	 * Save the mmu registers on the stack and call common trap code.
	 */
	tl0_split
	mov	%g2, %o3
	mov	%g3, %o4
	mov	%g4, %o5
	b	%xcc, tl0_trap
	 mov	T_DATA_PROTECTION, %o0
END(tl0_dmmu_prot_trap)
958
	/*
	 * User window spill/fill vectors.  NOTE: the numeric labels (1:, 2:)
	 * are referenced across adjacent macros (e.g. tl0_spill_0_n's "2f"
	 * resolves into tl0_spill_1_n), so these vectors rely on being
	 * instantiated consecutively in the trap table.  The low bit of %sp
	 * distinguishes 64-bit (bit set, SPOFF bias) from 32-bit frames.
	 */

	/* Spill assuming a 64-bit stack; cross to the 32-bit path if not. */
	.macro	tl0_spill_0_n
	andcc	%sp, 1, %g0
	bz,pn	%xcc, 2f
	 wr	%g0, ASI_AIUP, %asi
1:	SPILL(stxa, %sp + SPOFF, 8, %asi)
	saved
	wrpr	%g0, WSTATE_ASSUME64, %wstate
	retry
	.align	32
	RSF_TRAP(T_SPILL)
	RSF_TRAP(T_SPILL)
	.endm

	/* Spill assuming a 32-bit stack; cross to the 64-bit path if not. */
	.macro	tl0_spill_1_n
	andcc	%sp, 1, %g0
	bnz,pt	%xcc, 1b
	 wr	%g0, ASI_AIUP, %asi
2:	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
	SPILL(stwa, %sp, 4, %asi)
	saved
	wrpr	%g0, WSTATE_ASSUME32, %wstate
	retry
	.align	32
	RSF_TRAP(T_SPILL)
	RSF_TRAP(T_SPILL)
	.endm

	/* 64-bit spill with no stack-type check (test wstate). */
	.macro	tl0_spill_2_n
	wr	%g0, ASI_AIUP, %asi
	SPILL(stxa, %sp + SPOFF, 8, %asi)
	saved
	retry
	.align	32
	RSF_ALIGN_RETRY(WSTATE_TEST32)
	RSF_TRAP(T_SPILL)
	.endm

	/* 32-bit spill with no stack-type check (test wstate). */
	.macro	tl0_spill_3_n
	wr	%g0, ASI_AIUP, %asi
	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
	SPILL(stwa, %sp, 4, %asi)
	saved
	retry
	.align	32
	RSF_ALIGN_RETRY(WSTATE_TEST64)
	RSF_TRAP(T_SPILL)
	.endm

	/* Fill assuming a 64-bit stack; cross to the 32-bit path if not. */
	.macro	tl0_fill_0_n
	andcc	%sp, 1, %g0
	bz,pn	%xcc, 2f
	 wr	%g0, ASI_AIUP, %asi
1:	FILL(ldxa, %sp + SPOFF, 8, %asi)
	restored
	wrpr	%g0, WSTATE_ASSUME64, %wstate
	retry
	.align	32
	RSF_TRAP(T_FILL)
	RSF_TRAP(T_FILL)
	.endm

	/*
	 * Fill assuming a 32-bit stack; cross to the 64-bit path if not.
	 * NOTE(review): the backward branch carries no prediction hint,
	 * unlike tl0_spill_1_n's bnz,pt — presumably equivalent; confirm.
	 */
	.macro	tl0_fill_1_n
	andcc	%sp, 1, %g0
	bnz	%xcc, 1b
	 wr	%g0, ASI_AIUP, %asi
2:	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
	FILL(lduwa, %sp, 4, %asi)
	restored
	wrpr	%g0, WSTATE_ASSUME32, %wstate
	retry
	.align	32
	RSF_TRAP(T_FILL)
	RSF_TRAP(T_FILL)
	.endm

	/* 64-bit fill with no stack-type check (test wstate). */
	.macro	tl0_fill_2_n
	wr	%g0, ASI_AIUP, %asi
	FILL(ldxa, %sp + SPOFF, 8, %asi)
	restored
	retry
	.align	32
	RSF_ALIGN_RETRY(WSTATE_TEST32)
	RSF_TRAP(T_FILL)
	.endm

	/* 32-bit fill with no stack-type check (test wstate). */
	.macro	tl0_fill_3_n
	wr	%g0, ASI_AIUP, %asi
	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
	FILL(lduwa, %sp, 4, %asi)
	restored
	retry
	.align	32
	RSF_ALIGN_RETRY(WSTATE_TEST64)
	RSF_TRAP(T_FILL)
	.endm

1054
/*
 * Deliver a T_SPILL/T_FILL trap (from RSF_TRAP): restore the trapped
 * window's cwp from %tstate, then call the common trap code with the type
 * passed in %g2.
 */
ENTRY(tl0_sftrap)
	rdpr	%tstate, %g1
	and	%g1, TSTATE_CWP_MASK, %g1
	wrpr	%g1, 0, %cwp
	tl0_split
	b	%xcc, tl0_trap
	 mov	%g2, %o0
END(tl0_sftrap)

	/* Unused spill vectors: reset the processor if ever reached. */
	.macro	tl0_spill_bad	count
	.rept	\count
	sir
	.align	128
	.endr
	.endm

	/* Unused fill vectors: reset the processor if ever reached. */
	.macro	tl0_fill_bad	count
	.rept	\count
	sir
	.align	128
	.endr
	.endm

	/* System call vector: split the windows and enter tl0_syscall. */
	.macro	tl0_syscall
	tl0_split
	b	%xcc, tl0_syscall
	 mov	T_SYSCALL, %o0
	.align	32
	.endm

	/* Generate count software trap vectors delivering T_SOFT. */
	.macro	tl0_soft	count
	.rept	\count
	tl0_gen	T_SOFT
	.endr
	.endm

1090
	/* Get a new window on the kernel stack for a nested trap. */
	.macro	tl1_kstack
	save	%sp, -CCFSZ, %sp
	.endm

	/* Common tl1 trap entry: new window, saved %pil, call tl1_trap. */
	.macro	tl1_setup	type
	tl1_kstack
	rdpr	%pil, %o1
	b	%xcc, tl1_trap
	 mov	\type | T_KERNEL, %o0
	.endm

	/* Generic tl1 trap vector for the specified type. */
	.macro	tl1_gen		type
	tl1_setup \type
	.align	32
	.endm

	/* Generate count reserved tl1 trap vectors. */
	.macro	tl1_reserved	count
	.rept	\count
	tl1_gen	T_RESERVED
	.endr
	.endm

1112
	/*
	 * Instruction access exception at tl1: capture %tpc and the immu
	 * sfsr, clear the sfsr, and hand off to the out-of-line trap entry.
	 */
	.macro	tl1_insn_excptn
	wr	%g0, ASI_IMMU, %asi
	rdpr	%tpc, %g3
	ldxa	[%g0 + AA_IMMU_SFSR] %asi, %g4
	stxa	%g0, [%g0 + AA_IMMU_SFSR] %asi
	membar	#Sync
	b	%xcc, tl1_insn_exceptn_trap
	 mov	T_INSTRUCTION_EXCEPTION | T_KERNEL, %g2
	.align	32
	.endm

/*
 * Out-of-line tail for tl1_insn_excptn: %g2 = type, %g3 = tpc, %g4 = sfsr.
 */
ENTRY(tl1_insn_exceptn_trap)
	tl1_kstack
	rdpr	%pil, %o1
	mov	%g3, %o4
	mov	%g4, %o5
	b	%xcc, tl1_trap
	 mov	%g2, %o0
END(tl1_insn_exceptn_trap)
1132
	/* Data access exception at tl1: vector to the out-of-line handler. */
	.macro	tl1_data_excptn
	b,a	%xcc, tl1_data_excptn_trap
	 nop
	.align	32
	.endm

/*
 * Data access exception at tl1: first try to resume a faulted spill/fill
 * via the resume magic (which returns directly with done); otherwise fall
 * through to tl1_sfsr_trap with T_DATA_EXCEPTION.
 */
ENTRY(tl1_data_excptn_trap)
	wrpr	%g0, PSTATE_ALT, %pstate
	RESUME_SPILLFILL_MMU_CLR_SFSR
	b	%xcc, tl1_sfsr_trap
	 mov	T_DATA_EXCEPTION | T_KERNEL, %g2
END(tl1_data_excptn_trap)

	/* Alignment fault at tl1: vector to the out-of-line handler. */
	.macro	tl1_align
	b,a	%xcc, tl1_align_trap
	 nop
	.align	32
	.endm

1151
/*
 * Alignment fault at tl1: first try to resume a faulted spill/fill via the
 * resume magic; otherwise fall through to tl1_sfsr_trap.
 */
ENTRY(tl1_align_trap)
	wrpr	%g0, PSTATE_ALT, %pstate
	RESUME_SPILLFILL_ALIGN
	b	%xcc, tl1_sfsr_trap
	 mov	T_MEM_ADDRESS_NOT_ALIGNED | T_KERNEL, %g2
/* Fix: END() previously named tl1_data_excptn_trap (copy-paste error). */
END(tl1_align_trap)
1158
/*
 * Common tl1 tail for faults with sfar/sfsr state: %g2 = trap type.
 * Captures and clears the dmmu sfar/sfsr, then calls tl1_trap.
 */
ENTRY(tl1_sfsr_trap)
	wr	%g0, ASI_DMMU, %asi
	ldxa	[%g0 + AA_DMMU_SFAR] %asi, %g3
	ldxa	[%g0 + AA_DMMU_SFSR] %asi, %g4
	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi
	membar	#Sync

	tl1_kstack
	rdpr	%pil, %o1
	mov	%g3, %o4
	mov	%g4, %o5
	b	%xcc, tl1_trap
	 mov	%g2, %o0
END(tl1_sfsr_trap)

	/*
	 * Interrupt vector at tl1: save the old %pil, raise %pil to the
	 * interrupt's level, write the level's mask to %asr21 (V9
	 * CLEAR_SOFTINT), and call the common interrupt code.
	 */
	.macro	tl1_intr level, mask
	tl1_kstack
	rdpr	%pil, %o1
	wrpr	%g0, \level, %pil
	set	\mask, %o2
	wr	%o2, 0, %asr21
	b	%xcc, tl1_intr
	 mov	\level, %o2
	.align	32
	.endm

	.macro	tl1_intr_level
	INTR_LEVEL(1)
	.endm

	/* Interrupt vector trap at tl1: queue the incoming vector packet. */
	.macro	tl1_intr_vector
	b,a	intr_enqueue
	.align	32
	.endm

1193
/*
 * Read an interrupt vector packet from the hardware receive registers,
 * store it as an iqe in the per-cpu interrupt queue, and post a softint
 * (%asr20, V9 SET_SOFTINT) at the vector's priority so it gets serviced.
 * Runs entirely on globals and returns with retry.
 */
ENTRY(intr_enqueue)
	/*
	 * Find the head of the queue and advance it.
	 */
	ldx	[IQ_REG + IQ_HEAD], %g1
	add	%g1, 1, %g2
	and	%g2, IQ_MASK, %g2
	stx	%g2, [IQ_REG + IQ_HEAD]

#ifdef INVARIANTS
	/*
	 * If the new head is the same as the tail, the next interrupt will
	 * overwrite unserviced packets.  This is bad.
	 */
	ldx	[IQ_REG + IQ_TAIL], %g3
	cmp	%g3, %g2
	be	%xcc, 3f
	 nop
#endif

	/*
	 * Find the iqe.
	 */
	sllx	%g1, IQE_SHIFT, %g1
	add	%g1, IQ_REG, %g1

	/*
	 * Load the interrupt packet from the hardware.
	 */
	wr	%g0, ASI_SDB_INTR_R, %asi
	ldxa	[%g0] ASI_INTR_RECEIVE, %g2
	ldxa	[%g0 + AA_SDB_INTR_D0] %asi, %g3
	ldxa	[%g0 + AA_SDB_INTR_D1] %asi, %g4
	ldxa	[%g0 + AA_SDB_INTR_D2] %asi, %g5
	stxa	%g0, [%g0] ASI_INTR_RECEIVE
	membar	#Sync

	/*
	 * Store the tag and first data word in the iqe.  These are always
	 * valid.
	 */
	stw	%g2, [%g1 + IQE_TAG]
	stx	%g3, [%g1 + IQE_VEC]

	/*
	 * Load the function and argument, if not supplied in iqe.
	 */
	sllx	%g3, IV_SHIFT, %g3
	brnz,pn %g4, 1f
	 add	%g3, IV_REG, %g3
	ldx	[%g3 + IV_FUNC], %g4
	ldx	[%g3 + IV_ARG], %g5

	/*
	 * Save the priority and the two remaining data words in the iqe.
	 */
1:	lduw	[%g3 + IV_PRI], %g3
	stw	%g3, [%g1 + IQE_PRI]
	stx	%g4, [%g1 + IQE_FUNC]
	stx	%g5, [%g1 + IQE_ARG]

#if KTR_COMPILE & KTR_INTR
	CATR(KTR_INTR, "intr_enqueue: head=%d tail=%d pri=%p tag=%#x vec=%#x"
	    , %g2, %g4, %g5, 7, 8, 9)
	ldx	[IQ_REG + IQ_HEAD], %g4
	stx	%g4, [%g2 + KTR_PARM1]
	ldx	[IQ_REG + IQ_TAIL], %g4
	stx	%g4, [%g2 + KTR_PARM2]
	lduw	[%g1 + IQE_PRI], %g4
	stx	%g4, [%g2 + KTR_PARM3]
	lduw	[%g1 + IQE_TAG], %g4
	stx	%g4, [%g2 + KTR_PARM4]
	ldx	[%g1 + IQE_VEC], %g4
	stx	%g4, [%g2 + KTR_PARM5]
9:
#endif

	/*
	 * Trigger a softint at the level indicated by the priority.
	 */
	mov	1, %g2
	sllx	%g2, %g3, %g2
	wr	%g2, 0, %asr20

	retry

#ifdef INVARIANTS
	/*
	 * The interrupt queue is about to overflow.  We are in big trouble.
	 */
3:	sir
#endif
END(intr_enqueue)
1287
/*
 * Instruction TLB miss taken at trap level 1.  Attempt a fast inline
 * lookup in the kernel TSB; on any mismatch (invalid tte, wrong tag, or
 * no execute permission) fall through to a full tl1_trap.
 */
1288	.macro	tl1_immu_miss
1289	ldxa	[%g0] ASI_IMMU_TAG_TARGET_REG, %g1
1290	sllx	%g1, TT_VA_SHIFT - (PAGE_SHIFT - TTE_SHIFT), %g2
1291
1292	set	TSB_KERNEL_VA_MASK, %g3
1293	and	%g2, %g3, %g2
1294
1295	ldxa	[%g0] ASI_IMMU_TSB_8KB_PTR_REG, %g4
1296	add	%g2, %g4, %g2
1297
1298	/*
1299	 * Load the tte, check that it's valid and that the tags match.
1300	 */
1301	ldda	[%g2] ASI_NUCLEUS_QUAD_LDD, %g4 /*, %g5 */
1302	brgez,pn %g5, 2f
1303	 cmp	%g4, %g1
1304	bne,pn	%xcc, 2f
1305	 andcc	%g5, TD_EXEC, %g0
1306	bz,pn	%xcc, 2f
1307	 EMPTY
1308
1309	/*
1310	 * Set the reference bit, if it's currently clear.
1311	 */
1312	 andcc	%g5, TD_REF, %g0
1313	bnz,pt	%xcc, 1f
1314	 EMPTY
1315
1316	TTE_SET_REF(%g2, %g3, %g4)
1317
1318	/*
1319	 * Load the tte data into the TLB and retry the instruction.
1320	 */
13211:	stxa	%g5, [%g0] ASI_ITLB_DATA_IN_REG
1322	retry
1323
1324	/*
1325	 * Switch to alternate globals.
1326	 */
13272:	wrpr	%g0, PSTATE_ALT, %pstate
1328
1329	wr	%g0, ASI_IMMU, %asi
1330	ldxa	[%g0 + AA_IMMU_TAR] %asi, %g3
1331
1332	tl1_kstack
1333	rdpr	%pil, %o1
1334	mov	%g3, %o3
1335	b	%xcc, tl1_trap
1336	 mov	T_INSTRUCTION_MISS | T_KERNEL, %o0
1337	.align	128
1338	.endm
1339
/*
 * Data TLB miss taken at trap level 1.  Misses on user addresses (non-zero
 * context) go to tl1_dmmu_miss_user; kernel addresses are looked up inline
 * in the kernel TSB, falling back to tl1_dmmu_miss_trap on failure.
 */
1340	.macro	tl1_dmmu_miss
1341	ldxa	[%g0] ASI_DMMU_TAG_TARGET_REG, %g1
1342	srlx	%g1, TT_CTX_SHIFT, %g2
1343	brnz,pn	%g2, tl1_dmmu_miss_user
1344	 sllx	%g1, TT_VA_SHIFT - (PAGE_SHIFT - TTE_SHIFT), %g2
1345
1346	set	TSB_KERNEL_VA_MASK, %g3
1347	and	%g2, %g3, %g2
1348
1349	ldxa	[%g0] ASI_DMMU_TSB_8KB_PTR_REG, %g4
1350	add	%g2, %g4, %g2
1351
1352	/*
1353	 * Load the tte, check that it's valid and that the tags match.
1354	 */
1355	ldda	[%g2] ASI_NUCLEUS_QUAD_LDD, %g4 /*, %g5 */
1356	brgez,pn %g5, 2f
1357	 cmp	%g4, %g1
1358	bne,pn	%xcc, 2f
1359	 EMPTY
1360
1361	/*
1362	 * Set the reference bit, if it's currently clear.
1363	 */
1364	 andcc	%g5, TD_REF, %g0
1365	bnz,pt	%xcc, 1f
1366	 EMPTY
1367
1368	TTE_SET_REF(%g2, %g3, %g4)
1369
1370	/*
1371	 * Load the tte data into the TLB and retry the instruction.
1372	 */
13731:	stxa	%g5, [%g0] ASI_DTLB_DATA_IN_REG
1374	retry
1375
1376	/*
1377	 * Switch to alternate globals.
1378	 */
13792:	wrpr	%g0, PSTATE_ALT, %pstate
1380
1381	b,a	%xcc, tl1_dmmu_miss_trap
1382	 nop
1383	.align	128
1384	.endm
1385
/*
 * Slow path for kernel data TLB misses at TL=1: verify the kernel stack,
 * fetch the fault address and hand off to tl1_trap as T_DATA_MISS.
 */
1386ENTRY(tl1_dmmu_miss_trap)
1387#if KTR_COMPILE & KTR_TRAP
1388	CATR(KTR_TRAP, "tl1_dmmu_miss_trap: tar=%#lx"
1389	    , %g1, %g2, %g3, 7, 8, 9)
1390	mov	AA_DMMU_TAR, %g2
1391	ldxa	[%g2] ASI_DMMU, %g2
1392	stx	%g2, [%g1 + KTR_PARM1]
13939:
1394#endif
1395
1396	KSTACK_CHECK
1397
1398	wr	%g0, ASI_DMMU, %asi
1399	ldxa	[%g0 + AA_DMMU_TAR] %asi, %g3
1400
1401	tl1_kstack
1402	rdpr	%pil, %o1
1403	mov	%g3, %o3
1404	b	%xcc, tl1_trap
1405	 mov	T_DATA_MISS | T_KERNEL, %o0
1406END(tl1_dmmu_miss_trap)
1407
/*
 * Data TLB miss at TL=1 on a user address (typically during window
 * spill/fill to the user stack).  Try the user TSB inline; if that fails,
 * either resume a faulted spill/fill or take a full T_DATA_MISS trap.
 */
1408ENTRY(tl1_dmmu_miss_user)
1409	/*
1410	 * Try a fast inline lookup of the user tsb.
1411	 */
1412	dmmu_miss_user
1413
1414	/*
1415	 * Switch to alternate globals.
1416	 */
1417	wrpr	%g0, PSTATE_ALT, %pstate
1418
1419	/* Handle faults during window spill/fill. */
1420	RESUME_SPILLFILL_MMU
1421
1422	wr	%g0, ASI_DMMU, %asi
1423	ldxa	[%g0 + AA_DMMU_TAR] %asi, %g3
1424
1425	tl1_kstack
1426	rdpr	%pil, %o1
1427	mov	%g3, %o3
1428	b	%xcc, tl1_trap
1429	 mov	T_DATA_MISS | T_KERNEL, %o0
1430END(tl1_dmmu_miss_user)
1431
/*
 * Data protection (write to a read-only mapping) trap taken at TL=1.
 * User addresses go to tl1_dmmu_prot_user.  For kernel addresses, if the
 * tte allows writes (TD_SW) the writable bit is set inline and the stale
 * TLB entry replaced; otherwise bail out to tl1_dmmu_prot_trap.
 */
1432	.macro	tl1_dmmu_prot
1433	ldxa	[%g0] ASI_DMMU_TAG_TARGET_REG, %g1
1434	srlx	%g1, TT_CTX_SHIFT, %g2
1435	brnz,pn	%g2, tl1_dmmu_prot_user
1436	 sllx	%g1, TT_VA_SHIFT - (PAGE_SHIFT - TTE_SHIFT), %g2
1437
1438	set	TSB_KERNEL_VA_MASK, %g3
1439	and	%g2, %g3, %g2
1440
1441	ldxa	[%g0] ASI_DMMU_TSB_8KB_PTR_REG, %g4
1442	add	%g2, %g4, %g2
1443
1444	/*
1445	 * Load the tte, check that it's valid and that the tags match.
1446	 */
1447	ldda	[%g2] ASI_NUCLEUS_QUAD_LDD, %g4 /*, %g5 */
1448	brgez,pn %g5, 1f
1449	 cmp	%g4, %g1
1450	bne,pn	%xcc, 1f
1451	 andcc	%g5, TD_SW, %g0
1452	bz,pn	%xcc, 1f
1453	 EMPTY
1454
1455	TTE_SET_W(%g2, %g3, %g4)
1456
1457	/*
1458	 * Delete the old TLB entry.
1459	 */
1460	wr	%g0, ASI_DMMU, %asi
1461	ldxa	[%g0 + AA_DMMU_TAR] %asi, %g1
1462	stxa	%g0, [%g1] ASI_DMMU_DEMAP
1463	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi
1464
1465	/*
1466	 * Load the tte data into the TLB and retry the instruction.
1467	 */
1468	or	%g3, TD_W, %g3
1469	stxa	%g3, [%g0] ASI_DTLB_DATA_IN_REG
1470	retry
1471
14721:	b	%xcc, tl1_dmmu_prot_trap
1473	 wrpr	%g0, PSTATE_ALT, %pstate
1474	.align	128
1475	.endm
1476
/*
 * Data protection trap at TL=1 on a user address.  Try the user TSB
 * inline; failing that, resume a faulted spill/fill (clearing the SFSR)
 * or fall through to the full trap handler.
 */
1477ENTRY(tl1_dmmu_prot_user)
1478	/*
1479	 * Try a fast inline lookup of the user tsb.
1480	 */
1481	dmmu_prot_user
1482
1483	/*
1484	 * Switch to alternate globals.
1485	 */
1486	wrpr	%g0, PSTATE_ALT, %pstate
1487
1488	/* Handle faults during window spill/fill. */
1489	RESUME_SPILLFILL_MMU_CLR_SFSR
1490
1491	b,a	%xcc, tl1_dmmu_prot_trap
1492	 nop
1493END(tl1_dmmu_prot_user)
1494
/*
 * Slow path for data protection traps at TL=1: capture the MMU fault
 * state and hand off to tl1_trap as T_DATA_PROTECTION.
 */
1495ENTRY(tl1_dmmu_prot_trap)
1496	/*
1497	 * Load the sfar, sfsr and tar.  Clear the sfsr.
1498	 */
1499	wr	%g0, ASI_DMMU, %asi
1500	ldxa	[%g0 + AA_DMMU_TAR] %asi, %g2
1501	ldxa	[%g0 + AA_DMMU_SFAR] %asi, %g3
1502	ldxa	[%g0 + AA_DMMU_SFSR] %asi, %g4
1503	stxa	%g0, [%g0 + AA_DMMU_SFSR] %asi
1504	membar	#Sync
1505
1506	tl1_kstack
1507	rdpr	%pil, %o1
1508	mov	%g2, %o3
1509	mov	%g3, %o4
1510	mov	%g4, %o5
1511	b	%xcc, tl1_trap
1512	 mov	T_DATA_PROTECTION | T_KERNEL, %o0
1513END(tl1_dmmu_prot_trap)
1514
/* Spill a kernel (64-bit) window to the kernel stack; a fault here is fatal. */
1515	.macro	tl1_spill_0_n
1516	SPILL(stx, %sp + SPOFF, 8, EMPTY)
1517	saved
1518	retry
1519	.align	32
1520	RSF_FATAL(T_SPILL)
1521	RSF_FATAL(T_SPILL)
1522	.endm
1523
/*
 * Spill a user window from kernel mode, 64-bit case.  Note: the "2f"
 * branch target lives in tl1_spill_5_n, which occupies the next trap
 * table slot; the two macros share code across the slot boundary.
 */
1524	.macro	tl1_spill_4_n
1525	andcc	%sp, 1, %g0
1526	bz,pn	%xcc, 2f
1527	 wr	%g0, ASI_AIUP, %asi
15281:	SPILL(stxa, %sp + SPOFF, 8, %asi)
1529	saved
1530	retry
1531	.align	32
1532	RSF_SPILL_TOPCB
1533	RSF_SPILL_TOPCB
1534	.endm
1535
/*
 * Spill a user window from kernel mode, 32-bit case.  The "1b" target is
 * in tl1_spill_4_n (previous trap table slot); "2:" is reached from its
 * aligned-stack branch.
 */
1536	.macro	tl1_spill_5_n
1537	andcc	%sp, 1, %g0
1538	bnz,pt	%xcc, 1b
1539	 wr	%g0, ASI_AIUP, %asi
15402:	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
1541	SPILL(stwa, %sp, 4, %asi)
1542	saved
1543	retry
1544	.align	32
1545	RSF_SPILL_TOPCB
1546	RSF_SPILL_TOPCB
1547	.endm
1548
/* Spill a user window from kernel mode, assumed 64-bit stack. */
1549	.macro	tl1_spill_6_n
1550	wr	%g0, ASI_AIUP, %asi
1551	SPILL(stxa, %sp + SPOFF, 8, %asi)
1552	saved
1553	retry
1554	.align	32
1555	RSF_ALIGN_RETRY(WSTATE_TRANSITION | WSTATE_TEST32)
1556	RSF_SPILL_TOPCB
1557	.endm
1558
/* Spill a user window from kernel mode, assumed 32-bit stack (PSTATE_AM). */
1559	.macro	tl1_spill_7_n
1560	wr	%g0, ASI_AIUP, %asi
1561	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
1562	SPILL(stwa, %sp, 4, %asi)
1563	saved
1564	retry
1565	.align	32
1566	RSF_ALIGN_RETRY(WSTATE_TRANSITION | WSTATE_TEST64)
1567	RSF_SPILL_TOPCB
1568	.endm
1569
/*
 * Spill a user window held in %otherwin, 64-bit case; on success record
 * the discovered stack width in the other-wstate field.  The "2f" target
 * is in tl1_spill_1_o (next trap table slot).
 */
1570	.macro	tl1_spill_0_o
1571	andcc	%sp, 1, %g0
1572	bz,pn	%xcc, 2f
1573	 wr	%g0, ASI_AIUP, %asi
15741:	SPILL(stxa, %sp + SPOFF, 8, %asi)
1575	saved
1576	wrpr	%g0, WSTATE_ASSUME64 << WSTATE_OTHER_SHIFT, %wstate
1577	retry
1578	.align	32
1579	RSF_SPILL_TOPCB
1580	RSF_SPILL_TOPCB
1581	.endm
1582
/*
 * Spill a user window held in %otherwin, 32-bit case; "1b" is in
 * tl1_spill_0_o (previous trap table slot).
 */
1583	.macro	tl1_spill_1_o
1584	andcc	%sp, 1, %g0
1585	bnz,pt	%xcc, 1b
1586	 wr	%g0, ASI_AIUP, %asi
15872:	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
1588	SPILL(stwa, %sp, 4, %asi)
1589	saved
1590	wrpr	%g0, WSTATE_ASSUME32 << WSTATE_OTHER_SHIFT, %wstate
1591	retry
1592	.align	32
1593	RSF_SPILL_TOPCB
1594	RSF_SPILL_TOPCB
1595	.endm
1596
/* Spill an otherwin user window, assumed 64-bit stack. */
1597	.macro	tl1_spill_2_o
1598	wr	%g0, ASI_AIUP, %asi
1599	SPILL(stxa, %sp + SPOFF, 8, %asi)
1600	saved
1601	retry
1602	.align	32
1603	RSF_ALIGN_RETRY(WSTATE_TEST32 << WSTATE_OTHER_SHIFT)
1604	RSF_SPILL_TOPCB
1605	.endm
1606
/* Spill an otherwin user window, assumed 32-bit stack (PSTATE_AM). */
1607	.macro	tl1_spill_3_o
1608	wr	%g0, ASI_AIUP, %asi
1609	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
1610	SPILL(stwa, %sp, 4, %asi)
1611	saved
1612	retry
1613	.align	32
1614	RSF_ALIGN_RETRY(WSTATE_TEST64 << WSTATE_OTHER_SHIFT)
1615	RSF_SPILL_TOPCB
1616	.endm
1617
/* Fill a kernel (64-bit) window from the kernel stack; a fault here is fatal. */
1618	.macro	tl1_fill_0_n
1619	FILL(ldx, %sp + SPOFF, 8, EMPTY)
1620	restored
1621	retry
1622	.align	32
1623	RSF_FATAL(T_FILL)
1624	RSF_FATAL(T_FILL)
1625	.endm
1626
/*
 * Fill a user window from kernel mode, 64-bit case; "2f" lives in
 * tl1_fill_5_n (next trap table slot).
 */
1627	.macro	tl1_fill_4_n
1628	andcc	%sp, 1, %g0
1629	bz,pn	%xcc, 2f
1630	 wr	%g0, ASI_AIUP, %asi
16311:	FILL(ldxa, %sp + SPOFF, 8, %asi)
1632	restored
1633	retry
1634	.align 32
1635	RSF_FILL_MAGIC
1636	RSF_FILL_MAGIC
1637	.endm
1638
/*
 * Fill a user window from kernel mode, 32-bit case; "1b" lives in
 * tl1_fill_4_n (previous trap table slot).
 */
1639	.macro	tl1_fill_5_n
1640	andcc	%sp, 1, %g0
1641	bnz,pn	%xcc, 1b
1642	 wr	%g0, ASI_AIUP, %asi
16432:	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
1644	FILL(lduwa, %sp, 4, %asi)
1645	restored
1646	retry
1647	.align 32
1648	RSF_FILL_MAGIC
1649	RSF_FILL_MAGIC
1650	.endm
1651
/* Fill a user window from kernel mode, assumed 64-bit stack. */
1652	.macro	tl1_fill_6_n
1653	wr	%g0, ASI_AIUP, %asi
1654	FILL(ldxa, %sp + SPOFF, 8, %asi)
1655	restored
1656	retry
1657	.align 32
1658	RSF_ALIGN_RETRY(WSTATE_TEST32 | WSTATE_TRANSITION)
1659	RSF_FILL_MAGIC
1660	.endm
1661
/* Fill a user window from kernel mode, assumed 32-bit stack (PSTATE_AM). */
1662	.macro	tl1_fill_7_n
1663	wr	%g0, ASI_AIUP, %asi
1664	wrpr	%g0, PSTATE_ALT | PSTATE_AM, %pstate
1665	FILL(lduwa, %sp, 4, %asi)
1666	restored
1667	retry
1668	.align 32
1669	RSF_ALIGN_RETRY(WSTATE_TEST64 | WSTATE_TRANSITION)
1670	RSF_FILL_MAGIC
1671	.endm
1672
1673/*
1674 * This is used to spill windows that are still occupied with user
1675 * data on kernel entry to the pcb.
1676 */
1677ENTRY(tl1_spill_topcb)
1678	wrpr	%g0, PSTATE_ALT, %pstate
1679
1680	/* Free some globals for our use. */
1681	dec	24, ASP_REG
1682	stx	%g1, [ASP_REG + 0]
1683	stx	%g2, [ASP_REG + 8]
1684	stx	%g3, [ASP_REG + 16]
1685
1686	ldx	[PCB_REG + PCB_NSAVED], %g1	! %g1 = index of next free slot
1687
1688	/* Record the user %sp this window belongs to. */
1689	sllx	%g1, PTR_SHIFT, %g2
1690	add	%g2, PCB_REG, %g2
1691	stx	%sp, [%g2 + PCB_RWSP]
1692
1693	/* Store the window's locals and ins into the pcb save area. */
1694	sllx	%g1, RW_SHIFT, %g2
1695	add	%g2, PCB_REG, %g2
1696	SPILL(stx, %g2 + PCB_RW, 8, EMPTY)
1697
1698	inc	%g1
1699	stx	%g1, [PCB_REG + PCB_NSAVED]
1700
1701#if KTR_COMPILE & KTR_TRAP
1702	CATR(KTR_TRAP, "tl1_spill_topcb: pc=%#lx npc=%#lx sp=%#lx nsaved=%d"
1703	   , %g1, %g2, %g3, 7, 8, 9)
1704	rdpr	%tpc, %g2
1705	stx	%g2, [%g1 + KTR_PARM1]
1706	rdpr	%tnpc, %g2
1707	stx	%g2, [%g1 + KTR_PARM2]
1708	stx	%sp, [%g1 + KTR_PARM3]
1709	ldx	[PCB_REG + PCB_NSAVED], %g2
1710	stx	%g2, [%g1 + KTR_PARM4]
17119:
1712#endif
1713
1714	saved
1715
1716	/* Restore the scratch globals and return. */
1717	ldx	[ASP_REG + 16], %g3
1718	ldx	[ASP_REG + 8], %g2
1719	ldx	[ASP_REG + 0], %g1
1720	inc	24, ASP_REG
1721	retry
1722END(tl1_spill_topcb)
1720
/* Fill \count unused spill trap table slots with fatal (sir) stubs. */
1721	.macro	tl1_spill_bad	count
1722	.rept	\count
1723	sir
1724	.align	128
1725	.endr
1726	.endm
1727
/* Fill \count unused fill trap table slots with fatal (sir) stubs. */
1728	.macro	tl1_fill_bad	count
1729	.rept	\count
1730	sir
1731	.align	128
1732	.endr
1733	.endm
1734
/* Emit \count generic software trap entries for TL=1. */
1735	.macro	tl1_soft	count
1736	.rept	\count
1737	tl1_gen	T_SOFT | T_KERNEL
1738	.endr
1739	.endm
1740
/*
 * The trap table proper.  It must be 32k aligned; the first half (tl0_base)
 * holds the TL=0 (user) vectors, the second half (tl1_base) the TL>0
 * (kernel) vectors.  Each vector slot is 32 bytes (spill/fill slots span
 * 4 vectors = 128 bytes).  The ! comments give the hardware trap numbers.
 */
1741	.sect	.trap
1742	.align	0x8000
1743	.globl	tl0_base
1744
1745tl0_base:
1746	tl0_reserved	8				! 0x0-0x7
1747tl0_insn_excptn:
1748	tl0_insn_excptn					! 0x8
1749	tl0_reserved	1				! 0x9
1750tl0_insn_error:
1751	tl0_gen		T_INSTRUCTION_ERROR		! 0xa
1752	tl0_reserved	5				! 0xb-0xf
1753tl0_insn_illegal:
1754	tl0_gen		T_ILLEGAL_INSTRUCTION		! 0x10
1755tl0_priv_opcode:
1756	tl0_gen		T_PRIVILEGED_OPCODE		! 0x11
1757	tl0_reserved	14				! 0x12-0x1f
1758tl0_fp_disabled:
1759	tl0_gen		T_FP_DISABLED			! 0x20
1760tl0_fp_ieee:
1761	tl0_gen		T_FP_EXCEPTION_IEEE_754		! 0x21
1762tl0_fp_other:
1763	tl0_gen		T_FP_EXCEPTION_OTHER		! 0x22
1764tl0_tag_ovflw:
1765	tl0_gen		T_TAG_OFERFLOW			! 0x23
1766tl0_clean_window:
1767	clean_window					! 0x24
1768tl0_divide:
1769	tl0_gen		T_DIVISION_BY_ZERO		! 0x28
1770	tl0_reserved	7				! 0x29-0x2f
1771tl0_data_excptn:
1772	tl0_data_excptn					! 0x30
1773	tl0_reserved	1				! 0x31
1774tl0_data_error:
1775	tl0_gen		T_DATA_ERROR			! 0x32
1776	tl0_reserved	1				! 0x33
1777tl0_align:
1778	tl0_align					! 0x34
1779tl0_align_lddf:
1780	tl0_gen		T_RESERVED			! 0x35
1781tl0_align_stdf:
1782	tl0_gen		T_RESERVED			! 0x36
1783tl0_priv_action:
1784	tl0_gen		T_PRIVILEGED_ACTION		! 0x37
1785	tl0_reserved	9				! 0x38-0x40
1786tl0_intr_level:
1787	tl0_intr_level					! 0x41-0x4f
1788	tl0_reserved	16				! 0x50-0x5f
1789tl0_intr_vector:
1790	tl0_intr_vector					! 0x60
1791tl0_watch_phys:
1792	tl0_gen		T_PA_WATCHPOINT			! 0x61
1793tl0_watch_virt:
1794	tl0_gen		T_VA_WATCHPOINT			! 0x62
1795tl0_ecc:
1796	tl0_gen		T_CORRECTED_ECC_ERROR		! 0x63
1797tl0_immu_miss:
1798	tl0_immu_miss					! 0x64
1799tl0_dmmu_miss:
1800	tl0_dmmu_miss					! 0x68
1801tl0_dmmu_prot:
1802	tl0_dmmu_prot					! 0x6c
1803	tl0_reserved	16				! 0x70-0x7f
1804tl0_spill_0_n:
1805	tl0_spill_0_n					! 0x80
1806tl0_spill_1_n:
1807	tl0_spill_1_n					! 0x84
1808tl0_spill_2_n:
1809	tl0_spill_2_n					! 0x88
1810tl0_spill_3_n:
1811	tl0_spill_3_n					! 0x8c
1812	tl0_spill_bad	12				! 0x90-0xbf
1813tl0_fill_0_n:
1814	tl0_fill_0_n					! 0xc0
1815tl0_fill_1_n:
1816	tl0_fill_1_n					! 0xc4
1817tl0_fill_2_n:
1818	tl0_fill_2_n					! 0xc8
1819tl0_fill_3_n:
1820	tl0_fill_3_n					! 0xcc
1821	tl0_fill_bad	12				! 0xd0-0xff
1822tl0_soft:
1823	tl0_reserved	1				! 0x100
1824	tl0_gen		T_BREAKPOINT			! 0x101
1825	tl0_gen		T_DIVISION_BY_ZERO		! 0x102
1826	tl0_reserved	1				! 0x103
1827	tl0_gen		T_CLEAN_WINDOW			! 0x104
1828	tl0_gen		T_RANGE_CHECK			! 0x105
1829	tl0_gen		T_FIX_ALIGNMENT			! 0x106
1830	tl0_gen		T_INTEGER_OVERFLOW		! 0x107
1831	tl0_reserved	1				! 0x108
1832	tl0_syscall					! 0x109
1833	tl0_fp_restore					! 0x10a
1834	tl0_reserved	5				! 0x10b-0x10f
1835	tl0_gen		T_TRAP_INSTRUCTION_16		! 0x110
1836	tl0_gen		T_TRAP_INSTRUCTION_17		! 0x111
1837	tl0_gen		T_TRAP_INSTRUCTION_18		! 0x112
1838	tl0_gen		T_TRAP_INSTRUCTION_19		! 0x113
1839	tl0_gen		T_TRAP_INSTRUCTION_20		! 0x114
1840	tl0_gen		T_TRAP_INSTRUCTION_21		! 0x115
1841	tl0_gen		T_TRAP_INSTRUCTION_22		! 0x116
1842	tl0_gen		T_TRAP_INSTRUCTION_23		! 0x117
1843	tl0_gen		T_TRAP_INSTRUCTION_24		! 0x118
1844	tl0_gen		T_TRAP_INSTRUCTION_25		! 0x119
1845	tl0_gen		T_TRAP_INSTRUCTION_26		! 0x11a
1846	tl0_gen		T_TRAP_INSTRUCTION_27		! 0x11b
1847	tl0_gen		T_TRAP_INSTRUCTION_28		! 0x11c
1848	tl0_gen		T_TRAP_INSTRUCTION_29		! 0x11d
1849	tl0_gen		T_TRAP_INSTRUCTION_30		! 0x11e
1850	tl0_gen		T_TRAP_INSTRUCTION_31		! 0x11f
1851	tl0_reserved	224				! 0x120-0x1ff
1852
1853tl1_base:
1854	tl1_reserved	8				! 0x200-0x207
1855tl1_insn_excptn:
1856	tl1_insn_excptn					! 0x208
1857	tl1_reserved	1				! 0x209
1858tl1_insn_error:
1859	tl1_gen		T_INSTRUCTION_ERROR		! 0x20a
1860	tl1_reserved	5				! 0x20b-0x20f
1861tl1_insn_illegal:
1862	tl1_gen		T_ILLEGAL_INSTRUCTION		! 0x210
1863tl1_priv_opcode:
1864	tl1_gen		T_PRIVILEGED_OPCODE		! 0x211
1865	tl1_reserved	14				! 0x212-0x21f
1866tl1_fp_disabled:
1867	tl1_gen		T_FP_DISABLED			! 0x220
1868tl1_fp_ieee:
1869	tl1_gen		T_FP_EXCEPTION_IEEE_754		! 0x221
1870tl1_fp_other:
1871	tl1_gen		T_FP_EXCEPTION_OTHER		! 0x222
1872tl1_tag_ovflw:
1873	tl1_gen		T_TAG_OFERFLOW			! 0x223
1874tl1_clean_window:
1875	clean_window					! 0x224
1876tl1_divide:
1877	tl1_gen		T_DIVISION_BY_ZERO		! 0x228
1878	tl1_reserved	7				! 0x229-0x22f
1879tl1_data_excptn:
1880	tl1_data_excptn					! 0x230
1881	tl1_reserved	1				! 0x231
1882tl1_data_error:
1883	tl1_gen		T_DATA_ERROR			! 0x232
1884	tl1_reserved	1				! 0x233
1885tl1_align:
1886	tl1_align					! 0x234
1887tl1_align_lddf:
1888	tl1_gen		T_RESERVED			! 0x235
1889tl1_align_stdf:
1890	tl1_gen		T_RESERVED			! 0x236
1891tl1_priv_action:
1892	tl1_gen		T_PRIVILEGED_ACTION		! 0x237
1893	tl1_reserved	9				! 0x238-0x240
1894tl1_intr_level:
1895	tl1_intr_level					! 0x241-0x24f
1896	tl1_reserved	16				! 0x250-0x25f
1897tl1_intr_vector:
1898	tl1_intr_vector					! 0x260
1899tl1_watch_phys:
1900	tl1_gen		T_PA_WATCHPOINT			! 0x261
1901tl1_watch_virt:
1902	tl1_gen		T_VA_WATCHPOINT			! 0x262
1903tl1_ecc:
1904	tl1_gen		T_CORRECTED_ECC_ERROR		! 0x263
1905tl1_immu_miss:
1906	tl1_immu_miss					! 0x264
1907tl1_dmmu_miss:
1908	tl1_dmmu_miss					! 0x268
1909tl1_dmmu_prot:
1910	tl1_dmmu_prot					! 0x26c
1911	tl1_reserved	16				! 0x270-0x27f
1912tl1_spill_0_n:
1913	tl1_spill_0_n					! 0x280
1914	tl1_spill_bad	3				! 0x284-0x28f
1915tl1_spill_4_n:
1916	tl1_spill_4_n					! 0x290
1917tl1_spill_5_n:
1918	tl1_spill_5_n					! 0x294
1919tl1_spill_6_n:
1920	tl1_spill_6_n					! 0x298
1921tl1_spill_7_n:
1922	tl1_spill_7_n					! 0x29c
1923tl1_spill_0_o:
1924	tl1_spill_0_o					! 0x2a0
1925tl1_spill_1_o:
1926	tl1_spill_1_o					! 0x2a4
1927tl1_spill_2_o:
1928	tl1_spill_2_o					! 0x2a8
1929tl1_spill_3_o:
1930	tl1_spill_3_o					! 0x2ac
1931	tl1_spill_bad	4				! 0x2b0-0x2bf
1932tl1_fill_0_n:
1933	tl1_fill_0_n					! 0x2c0
1934	tl1_fill_bad	3				! 0x2c4-0x2cf
1935tl1_fill_4_n:
1936	tl1_fill_4_n					! 0x2d0
1937tl1_fill_5_n:
1938	tl1_fill_5_n					! 0x2d4
1939tl1_fill_6_n:
1940	tl1_fill_6_n					! 0x2d8
1941tl1_fill_7_n:
1942	tl1_fill_7_n					! 0x2dc
1943	tl1_fill_bad	8				! 0x2e0-0x2ff
1944	tl1_reserved	1				! 0x300
1945tl1_breakpoint:
1946	tl1_gen		T_BREAKPOINT			! 0x301
1947	tl1_gen		T_RSTRWP_PHYS			! 0x302
1948	tl1_gen		T_RSTRWP_VIRT			! 0x303
1949	tl1_reserved	252				! 0x304-0x3ff
1950
1951/*
1952 * User trap entry point.
1953 *
1954 * void tl0_trap(u_int type, u_long o1, u_long o2, u_long tar, u_long sfar,
1955 *		 u_int sfsr)
1956 *
1957 * The following setup has been performed:
1958 *	- the windows have been split and the active user window has been saved
1959 *	  (maybe just to the pcb)
1960 *	- we are on alternate globals and interrupts are disabled
1961 *
1962 * We switch to the kernel stack,  build a trapframe, switch to normal
1963 * globals, enable interrupts and call trap.
1964 *
1965 * NOTE: We must be very careful setting up the per-cpu pointer.  We know that
1966 * it has been pre-set in alternate globals, so we read it from there and setup
1967 * the normal %g7 *before* enabling interrupts.  This avoids any possibility
1968 * of cpu migration and using the wrong pcpup.
1969 */
1970ENTRY(tl0_trap)
1971	/*
1972	 * Force kernel store order.
1973	 */
1974	wrpr	%g0, PSTATE_ALT, %pstate
1975
1976	/* Snapshot trap state before %tl can change. */
1977	rdpr	%tstate, %l0
1978	rdpr	%tpc, %l1
1979	rdpr	%tnpc, %l2
1980	rd	%y, %l3
1981	rd	%fprs, %l4
1982	rdpr	%wstate, %l5
1983
1984#if KTR_COMPILE & KTR_TRAP
1985	CATR(KTR_TRAP,
1986	    "tl0_trap: td=%p type=%#x pil=%#lx pc=%#lx npc=%#lx sp=%#lx"
1987	    , %g1, %g2, %g3, 7, 8, 9)
1988	ldx	[PCPU(CURTHREAD)], %g2
1989	stx	%g2, [%g1 + KTR_PARM1]
1990	stx	%o0, [%g1 + KTR_PARM2]
1991	rdpr	%pil, %g2
1992	stx	%g2, [%g1 + KTR_PARM3]
1993	stx	%l1, [%g1 + KTR_PARM4]
1994	stx	%l2, [%g1 + KTR_PARM5]
1995	stx	%i6, [%g1 + KTR_PARM6]
19969:
1997#endif
1998
1999	and	%l5, WSTATE_NORMAL_MASK, %l5
2000
2001	/*
2002	 * Check for a registered user trap handler (utrap): if the trap type
2003	 * is in range and the process has a handler installed for it, redirect
2004	 * directly back to userland without building a trapframe.
2005	 */
2006	cmp	%o0, UT_MAX
2007	bge,a,pt %xcc, 2f
2008	 nop
2009
2010	ldx	[PCPU(CURTHREAD)], %l6
2011	ldx	[%l6 + TD_PROC], %l6
2012	ldx	[%l6 + P_MD + MD_UTRAP], %l6
2013	brz,pt	%l6, 2f
2014	 sllx	%o0, PTR_SHIFT, %l7
2015	ldx	[%l6 + %l7], %l6
2016	brz,pt	%l6, 2f
2017	 andn	%l0, TSTATE_CWP_MASK, %l7
2018
2019	/*
2020	 * If any user windows are still saved in the pcb, they must be copied
2021	 * out first; deliver a T_SPILL through the normal path instead.
2022	 */
2023	ldx	[PCB_REG + PCB_NSAVED], %g1
2024	brnz,a,pn %g1, 1f
2025	 mov	T_SPILL, %o0
2026
2027#if KTR_COMPILE & KTR_TRAP
2028	CATR(KTR_TRAP, "tl0_trap: user trap npc=%#lx"
2029	    , %g1, %g2, %g3, 7, 8, 9)
2030	stx	%l6, [%g1 + KTR_PARM1]
20319:
2032#endif
2033
2034	/* Redirect to the utrap handler and return to userland with done. */
2035	wrpr	%l5, %wstate
2036	wrpr	%l6, %tnpc
2037	rdpr	%cwp, %l6
2038	wrpr	%l6, %l7, %tstate
2039
2040	mov	%l0, %l5
2041	mov	%l1, %l6
2042	mov	%l2, %l7
2043
2044	done
2045
20451:
2046#if KTR_COMPILE & KTR_TRAP
2047	CATR(KTR_TRAP, "tl0_trap: defer user trap npc=%#lx nsaved=%#lx"
2048	    , %g1, %g2, %g3, 7, 8, 9)
2049	stx	%l6, [%g1 + KTR_PARM1]
2050	ldx	[PCB_REG + PCB_NSAVED], %g2
2051	stx	%g2, [%g1 + KTR_PARM2]
20529:
2053#endif
2054
2055	/*
2056	 * Normal path: move the user's windows to %otherwin, switch to the
2057	 * kernel stack and build a trapframe.
2058	 */
20452:	sllx	%l5, WSTATE_OTHER_SHIFT, %l5
2046	wrpr	%l5, WSTATE_KERNEL, %wstate
2047	rdpr	%canrestore, %l6
2048	wrpr	%l6, 0, %otherwin
2049	wrpr	%g0, 0, %canrestore
2050
2051	sub	PCB_REG, SPOFF + CCFSZ + TF_SIZEOF, %sp
2052
2053	stw	%o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
2054	stx	%o3, [%sp + SPOFF + CCFSZ + TF_TAR]
2055	stx	%o4, [%sp + SPOFF + CCFSZ + TF_SFAR]
2056	stw	%o5, [%sp + SPOFF + CCFSZ + TF_SFSR]
2057
2058	stx	%l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
2059	stx	%l1, [%sp + SPOFF + CCFSZ + TF_TPC]
2060	stx	%l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
2061	stw	%l3, [%sp + SPOFF + CCFSZ + TF_Y]
2062	stb	%l4, [%sp + SPOFF + CCFSZ + TF_FPRS]
2063	stb	%l5, [%sp + SPOFF + CCFSZ + TF_WSTATE]
2064
2065	wr	%g0, FPRS_FEF, %fprs
2066	stx	%fsr, [%sp + SPOFF + CCFSZ + TF_FSR]
2067	wr	%g0, 0, %fprs
2068
2069	/*
2070	 * Save the normal globals, carrying the pcpu pointer across via %o0
2071	 * so %g7 is set up before interrupts can be enabled (see note above).
2072	 */
2069	mov	PCPU_REG, %o0
2070	wrpr	%g0, PSTATE_NORMAL, %pstate
2071
2072	stx	%g1, [%sp + SPOFF + CCFSZ + TF_G1]
2073	stx	%g2, [%sp + SPOFF + CCFSZ + TF_G2]
2074	stx	%g3, [%sp + SPOFF + CCFSZ + TF_G3]
2075	stx	%g4, [%sp + SPOFF + CCFSZ + TF_G4]
2076	stx	%g5, [%sp + SPOFF + CCFSZ + TF_G5]
2077	stx	%g6, [%sp + SPOFF + CCFSZ + TF_G6]
2078	stx	%g7, [%sp + SPOFF + CCFSZ + TF_G7]
2079
2080	mov	%o0, PCPU_REG
2081	wrpr	%g0, PSTATE_KERNEL, %pstate
2082
2083	stx	%i0, [%sp + SPOFF + CCFSZ + TF_O0]
2084	stx	%i1, [%sp + SPOFF + CCFSZ + TF_O1]
2085	stx	%i2, [%sp + SPOFF + CCFSZ + TF_O2]
2086	stx	%i3, [%sp + SPOFF + CCFSZ + TF_O3]
2087	stx	%i4, [%sp + SPOFF + CCFSZ + TF_O4]
2088	stx	%i5, [%sp + SPOFF + CCFSZ + TF_O5]
2089	stx	%i6, [%sp + SPOFF + CCFSZ + TF_O6]
2090	stx	%i7, [%sp + SPOFF + CCFSZ + TF_O7]
2091
2092/* Re-entry point used by tl0_ret to deliver deferred T_SPILL/T_FILL traps. */
2092.Ltl0_trap_reenter:
2093	call	trap
2094	 add	%sp, CCFSZ + SPOFF, %o0
2095	b,a	%xcc, tl0_ret
2096	 nop
2097END(tl0_trap)
2098
2099/*
2100 * System call entry point.
2101 *
2102 * Essentially the same as tl0_trap but calls syscall.
2103 */
2104ENTRY(tl0_syscall)
2105	/*
2106	 * Force kernel store order.
2107	 */
2108	wrpr	%g0, PSTATE_ALT, %pstate
2109
2110	/* Snapshot trap state before %tl can change. */
2110	rdpr	%tstate, %l0
2111	rdpr	%tpc, %l1
2112	rdpr	%tnpc, %l2
2113	rd	%y, %l3
2114	rd	%fprs, %l4
2115	rdpr	%wstate, %l5
2116
2117#if KTR_COMPILE & KTR_SYSC
2118	CATR(KTR_SYSC,
2119	    "tl0_syscall: td=%p type=%#x pil=%#lx pc=%#lx npc=%#lx sp=%#lx"
2120	    , %g1, %g2, %g3, 7, 8, 9)
2121	ldx	[PCPU(CURTHREAD)], %g2
2122	stx	%g2, [%g1 + KTR_PARM1]
2123	stx	%o0, [%g1 + KTR_PARM2]
2124	rdpr	%pil, %g2
2125	stx	%g2, [%g1 + KTR_PARM3]
2126	stx	%l1, [%g1 + KTR_PARM4]
2127	stx	%l2, [%g1 + KTR_PARM5]
2128	stx	%i6, [%g1 + KTR_PARM6]
21299:
2130#endif
2131
2132	/* Split the windows and switch to the kernel stack. */
2132	and	%l5, WSTATE_NORMAL_MASK, %l5
2133	sllx	%l5, WSTATE_OTHER_SHIFT, %l5
2134	wrpr	%l5, WSTATE_KERNEL, %wstate
2135	rdpr	%canrestore, %l6
2136	wrpr	%l6, 0, %otherwin
2137	wrpr	%g0, 0, %canrestore
2138
2139	sub	PCB_REG, SPOFF + CCFSZ + TF_SIZEOF, %sp
2140
2141	stw	%o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
2142
2143	stx	%l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
2144	stx	%l1, [%sp + SPOFF + CCFSZ + TF_TPC]
2145	stx	%l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
2146	stw	%l3, [%sp + SPOFF + CCFSZ + TF_Y]
2147	stb	%l4, [%sp + SPOFF + CCFSZ + TF_FPRS]
2148	stb	%l5, [%sp + SPOFF + CCFSZ + TF_WSTATE]
2149
2150	wr	%g0, FPRS_FEF, %fprs
2151	stx	%fsr, [%sp + SPOFF + CCFSZ + TF_FSR]
2152	wr	%g0, 0, %fprs
2153
2154	/* Save the normal globals; %o0 carries the pcpu pointer across. */
2154	mov	PCPU_REG, %o0
2155	wrpr	%g0, PSTATE_NORMAL, %pstate
2156
2157	stx	%g1, [%sp + SPOFF + CCFSZ + TF_G1]
2158	stx	%g2, [%sp + SPOFF + CCFSZ + TF_G2]
2159	stx	%g3, [%sp + SPOFF + CCFSZ + TF_G3]
2160	stx	%g4, [%sp + SPOFF + CCFSZ + TF_G4]
2161	stx	%g5, [%sp + SPOFF + CCFSZ + TF_G5]
2162	stx	%g6, [%sp + SPOFF + CCFSZ + TF_G6]
2163	stx	%g7, [%sp + SPOFF + CCFSZ + TF_G7]
2164
2165	mov	%o0, PCPU_REG
2166	wrpr	%g0, PSTATE_KERNEL, %pstate
2167
2168	stx	%i0, [%sp + SPOFF + CCFSZ + TF_O0]
2169	stx	%i1, [%sp + SPOFF + CCFSZ + TF_O1]
2170	stx	%i2, [%sp + SPOFF + CCFSZ + TF_O2]
2171	stx	%i3, [%sp + SPOFF + CCFSZ + TF_O3]
2172	stx	%i4, [%sp + SPOFF + CCFSZ + TF_O4]
2173	stx	%i5, [%sp + SPOFF + CCFSZ + TF_O5]
2174	stx	%i6, [%sp + SPOFF + CCFSZ + TF_O6]
2175	stx	%i7, [%sp + SPOFF + CCFSZ + TF_O7]
2176
2177	call	syscall
2178	 add	%sp, CCFSZ + SPOFF, %o0
2179	b,a	%xcc, tl0_ret
2180	 nop
2181END(tl0_syscall)
2182
/*
 * Interrupt entry from user mode: like tl0_trap, but the trap type is
 * always T_INTERRUPT, the level (in %o2) is recorded in the trapframe,
 * and the registered intr_handlers[level] is called instead of trap().
 */
2183ENTRY(tl0_intr)
2184	/*
2185	 * Force kernel store order.
2186	 */
2187	wrpr	%g0, PSTATE_ALT, %pstate
2188
2189	rdpr	%tstate, %l0
2190	rdpr	%tpc, %l1
2191	rdpr	%tnpc, %l2
2192	rd	%y, %l3
2193	rd	%fprs, %l4
2194	rdpr	%wstate, %l5
2195
2196#if KTR_COMPILE & KTR_INTR
2197	CATR(KTR_INTR,
2198	    "tl0_intr: td=%p type=%#x pil=%#lx pc=%#lx npc=%#lx sp=%#lx"
2199	    , %g1, %g2, %g3, 7, 8, 9)
2200	ldx	[PCPU(CURTHREAD)], %g2
2201	stx	%g2, [%g1 + KTR_PARM1]
2202	stx	%o0, [%g1 + KTR_PARM2]
2203	rdpr	%pil, %g2
2204	stx	%g2, [%g1 + KTR_PARM3]
2205	stx	%l1, [%g1 + KTR_PARM4]
2206	stx	%l2, [%g1 + KTR_PARM5]
2207	stx	%i6, [%g1 + KTR_PARM6]
22089:
2209#endif
2210
2211	/* Split the windows and switch to the kernel stack. */
2211	and	%l5, WSTATE_NORMAL_MASK, %l5
2212	sllx	%l5, WSTATE_OTHER_SHIFT, %l5
2213	wrpr	%l5, WSTATE_KERNEL, %wstate
2214	rdpr	%canrestore, %l6
2215	wrpr	%l6, 0, %otherwin
2216	wrpr	%g0, 0, %canrestore
2217
2218	sub	PCB_REG, SPOFF + CCFSZ + TF_SIZEOF, %sp
2219
2220	stx	%l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
2221	stx	%l1, [%sp + SPOFF + CCFSZ + TF_TPC]
2222	stx	%l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
2223	stw	%l3, [%sp + SPOFF + CCFSZ + TF_Y]
2224	stb	%l4, [%sp + SPOFF + CCFSZ + TF_FPRS]
2225	stb	%l5, [%sp + SPOFF + CCFSZ + TF_WSTATE]
2226
2227	wr	%g0, FPRS_FEF, %fprs
2228	stx	%fsr, [%sp + SPOFF + CCFSZ + TF_FSR]
2229	wr	%g0, 0, %fprs
2230
2231	mov	T_INTERRUPT, %o0
2232	stw	%o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
2233	stw	%o2, [%sp + SPOFF + CCFSZ + TF_LEVEL]
2234
2235	/* Save the normal globals; %o0 carries the pcpu pointer across. */
2235	mov	PCPU_REG, %o0
2236	wrpr	%g0, PSTATE_NORMAL, %pstate
2237
2238	stx	%g1, [%sp + SPOFF + CCFSZ + TF_G1]
2239	stx	%g2, [%sp + SPOFF + CCFSZ + TF_G2]
2240	stx	%g3, [%sp + SPOFF + CCFSZ + TF_G3]
2241	stx	%g4, [%sp + SPOFF + CCFSZ + TF_G4]
2242	stx	%g5, [%sp + SPOFF + CCFSZ + TF_G5]
2243	stx	%g6, [%sp + SPOFF + CCFSZ + TF_G6]
2244	stx	%g7, [%sp + SPOFF + CCFSZ + TF_G7]
2245
2246	mov	%o0, PCPU_REG
2247	wrpr	%g0, PSTATE_KERNEL, %pstate
2248
2249	stx	%i0, [%sp + SPOFF + CCFSZ + TF_O0]
2250	stx	%i1, [%sp + SPOFF + CCFSZ + TF_O1]
2251	stx	%i2, [%sp + SPOFF + CCFSZ + TF_O2]
2252	stx	%i3, [%sp + SPOFF + CCFSZ + TF_O3]
2253	stx	%i4, [%sp + SPOFF + CCFSZ + TF_O4]
2254	stx	%i5, [%sp + SPOFF + CCFSZ + TF_O5]
2255	stx	%i6, [%sp + SPOFF + CCFSZ + TF_O6]
2256	stx	%i7, [%sp + SPOFF + CCFSZ + TF_O7]
2257
2258	/* Bump the interrupt statistics counter. */
2258	SET(cnt+V_INTR, %l1, %l0)
2259	ATOMIC_INC_INT(%l0, %l1, %l2)
2260
2261	/* Dispatch to the handler registered for this interrupt level. */
2261	SET(intr_handlers, %l1, %l0)
2262	sllx	%o2, IH_SHIFT, %l1
2263	ldx	[%l0 + %l1], %l1
2264	call	%l1
2265	 add	%sp, CCFSZ + SPOFF, %o0
2266	b,a	%xcc, tl0_ret
2267	 nop
2268END(tl0_intr)
2269
/*
 * Return to user mode: run any pending ASTs, copy out deferred pcb
 * windows, restore the trapframe state and the user's window, and retry.
 * If the final fill from the user stack faults, control resumes at
 * tl0_ret_fill_end and the trap is re-entered as T_FILL.
 */
2270ENTRY(tl0_ret)
2271#if KTR_COMPILE & KTR_TRAP
2272	CATR(KTR_TRAP, "tl0_ret: check ast td=%p (%s) pil=%#lx sflag=%#x"
2273	    , %g1, %g2, %g3, 7, 8, 9)
2274	ldx	[PCPU(CURTHREAD)], %g2
2275	stx	%g2, [%g1 + KTR_PARM1]
2276	ldx	[%g2 + TD_PROC], %g2
2277	add	%g2, P_COMM, %g3
2278	stx	%g3, [%g1 + KTR_PARM2]
2279	rdpr	%pil, %g3
2280	stx	%g3, [%g1 + KTR_PARM3]
2281	lduw	[%g2 + P_SFLAG], %g3
2282	stx	%g3, [%g1 + KTR_PARM4]
22839:
2284#endif
2285
2286	/* Block soft interrupts while checking for pending ASTs. */
2286	wrpr	%g0, PIL_TICK, %pil
2287	ldx	[PCPU(CURTHREAD)], %l0
2288	ldx	[%l0 + TD_KSE], %l1
2289	lduw	[%l1 + KE_FLAGS], %l2
2290	and	%l2, KEF_ASTPENDING | KEF_NEEDRESCHED, %l2
2291	brz,pt	%l2, 1f
2292	 nop
2293	call	ast
2294	 add	%sp, CCFSZ + SPOFF, %o0
2295
2296	/*
2297	 * If user windows are still saved in the pcb, re-enter trap() as
2298	 * T_SPILL to copy them out to the user stack first.
2299	 */
22961:	ldx	[%l0 + TD_PCB], %l1
2297	ldx	[%l1 + PCB_NSAVED], %l2
2298	mov	T_SPILL, %o0
2299	brnz,a,pn %l2, .Ltl0_trap_reenter
2300	 stw	%o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
2301
2302	/* Restore the globals and outs from the trapframe. */
2302	ldx	[%sp + SPOFF + CCFSZ + TF_G1], %g1
2303	ldx	[%sp + SPOFF + CCFSZ + TF_G2], %g2
2304	ldx	[%sp + SPOFF + CCFSZ + TF_G3], %g3
2305	ldx	[%sp + SPOFF + CCFSZ + TF_G4], %g4
2306	ldx	[%sp + SPOFF + CCFSZ + TF_G5], %g5
2307	ldx	[%sp + SPOFF + CCFSZ + TF_G6], %g6
2308	ldx	[%sp + SPOFF + CCFSZ + TF_G7], %g7
2309
2310	ldx	[%sp + SPOFF + CCFSZ + TF_O0], %i0
2311	ldx	[%sp + SPOFF + CCFSZ + TF_O1], %i1
2312	ldx	[%sp + SPOFF + CCFSZ + TF_O2], %i2
2313	ldx	[%sp + SPOFF + CCFSZ + TF_O3], %i3
2314	ldx	[%sp + SPOFF + CCFSZ + TF_O4], %i4
2315	ldx	[%sp + SPOFF + CCFSZ + TF_O5], %i5
2316	ldx	[%sp + SPOFF + CCFSZ + TF_O6], %i6
2317	ldx	[%sp + SPOFF + CCFSZ + TF_O7], %i7
2318
2319	ldx	[%sp + SPOFF + CCFSZ + TF_TSTATE], %l0
2320	ldx	[%sp + SPOFF + CCFSZ + TF_TPC], %l1
2321	ldx	[%sp + SPOFF + CCFSZ + TF_TNPC], %l2
2322	lduw	[%sp + SPOFF + CCFSZ + TF_Y], %l3
2323	ldub	[%sp + SPOFF + CCFSZ + TF_FPRS], %l4
2324	ldub	[%sp + SPOFF + CCFSZ + TF_WSTATE], %l5
2325
2326	wrpr	%g0, PSTATE_ALT, %pstate
2327
2328	wrpr	%g0, 0, %pil
2329	wrpr	%l1, 0, %tpc
2330	wrpr	%l2, 0, %tnpc
2331	wr	%l3, 0, %y
2332
2333	andn	%l0, TSTATE_CWP_MASK, %g1
2334	mov	%l4, %g2
2335
2336	/* Give the user's windows back to %canrestore. */
2336	srlx	%l5, WSTATE_OTHER_SHIFT, %g3
2337	wrpr	%g3, WSTATE_TRANSITION, %wstate
2338	rdpr	%otherwin, %o0
2339	wrpr	%o0, 0, %canrestore
2340	wrpr	%g0, 0, %otherwin
2341	wrpr	%o0, 0, %cleanwin
2342
2343	/*
2344	 * If this instruction causes a fill trap which fails to fill a window
2345	 * from the user stack, we will resume at tl0_ret_fill_end and call
2346	 * back into the kernel.
2347	 */
2348	restore
2349tl0_ret_fill:
2350
2351	rdpr	%cwp, %g4
2352	wrpr	%g1, %g4, %tstate
2353	wr	%g2, 0, %fprs
2354	wrpr	%g3, 0, %wstate
2355
2356#if KTR_COMPILE & KTR_TRAP
2357	CATR(KTR_TRAP, "tl0_ret: td=%#lx pil=%#lx pc=%#lx npc=%#lx sp=%#lx"
2358	    , %g2, %g3, %g4, 7, 8, 9)
2359	ldx	[PCPU(CURTHREAD)], %g3
2360	stx	%g3, [%g2 + KTR_PARM1]
2361	rdpr	%pil, %g3
2362	stx	%g3, [%g2 + KTR_PARM2]
2363	rdpr	%tpc, %g3
2364	stx	%g3, [%g2 + KTR_PARM3]
2365	rdpr	%tnpc, %g3
2366	stx	%g3, [%g2 + KTR_PARM4]
2367	stx	%sp, [%g2 + KTR_PARM5]
23689:
2369#endif
2370
2371	retry
2372tl0_ret_fill_end:
2373
2374#if KTR_COMPILE & KTR_TRAP
2375	CATR(KTR_TRAP, "tl0_ret: fill magic ps=%#lx ws=%#lx sp=%#lx"
2376	    , %l0, %l1, %l2, 7, 8, 9)
2377	rdpr	%pstate, %l1
2378	stx	%l1, [%l0 + KTR_PARM1]
2379	stx	%l5, [%l0 + KTR_PARM2]
2380	stx	%sp, [%l0 + KTR_PARM3]
23819:
2382#endif
2383
2384	/*
2385	 * The fill failed and magic has been performed.  Call trap again,
2386	 * which will copyin the window on the user's behalf.
2387	 */
2388	wrpr	%l5, 0, %wstate
2389	wrpr	%g0, PSTATE_ALT, %pstate
2390	mov	PCPU_REG, %o0
2391	wrpr	%g0, PSTATE_NORMAL, %pstate
2392	mov	%o0, PCPU_REG
2393	wrpr	%g0, PSTATE_KERNEL, %pstate
2394	mov	T_FILL, %o0
2395	b	%xcc, .Ltl0_trap_reenter
2396	 stw	%o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
2397END(tl0_ret)
2398
2399/*
2400 * Kernel trap entry point
2401 *
2402 * void tl1_trap(u_int type, u_char pil, u_long o2, u_long tar, u_long sfar,
2403 *		 u_int sfsr)
2404 *
2405 * This is easy because the stack is already setup and the windows don't need
2406 * to be split.  We build a trapframe and call trap(), the same as above, but
2407 * the outs don't need to be saved.
2408 */
2409ENTRY(tl1_trap)
2410	sub	%sp, TF_SIZEOF, %sp
2411
2412	/* Snapshot trap state; %o1 is the caller's saved %pil. */
2412	rdpr	%tstate, %l0
2413	rdpr	%tpc, %l1
2414	rdpr	%tnpc, %l2
2415	mov	%o1, %l3
2416
2417#if KTR_COMPILE & KTR_TRAP
2418	CATR(KTR_TRAP, "tl1_trap: td=%p type=%#lx pil=%#lx pc=%#lx sp=%#lx"
2419	    , %g1, %g2, %g3, 7, 8, 9)
2420	ldx	[PCPU(CURTHREAD)], %g2
2421	stx	%g2, [%g1 + KTR_PARM1]
2422	andn	%o0, T_KERNEL, %g2
2423	stx	%g2, [%g1 + KTR_PARM2]
2424	stx	%o1, [%g1 + KTR_PARM3]
2425	stx	%l1, [%g1 + KTR_PARM4]
2426	stx	%i6, [%g1 + KTR_PARM5]
24279:
2428#endif
2429
2430	/* Drop to trap level 1 while trap() runs. */
2430	wrpr	%g0, 1, %tl
2431
2432	stx	%l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
2433	stx	%l1, [%sp + SPOFF + CCFSZ + TF_TPC]
2434	stx	%l2, [%sp + SPOFF + CCFSZ + TF_TNPC]
2435
2436	stw	%o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
2437	stb	%o1, [%sp + SPOFF + CCFSZ + TF_PIL]
2438	stx	%o3, [%sp + SPOFF + CCFSZ + TF_TAR]
2439	stx	%o4, [%sp + SPOFF + CCFSZ + TF_SFAR]
2440	stw	%o5, [%sp + SPOFF + CCFSZ + TF_SFSR]
2441
2442	stx	%i6, [%sp + SPOFF + CCFSZ + TF_O6]
2443	stx	%i7, [%sp + SPOFF + CCFSZ + TF_O7]
2444
2445	/* Save the normal globals; %o0 carries the pcpu pointer across. */
2445	mov	PCPU_REG, %o0
2446	wrpr	%g0, PSTATE_NORMAL, %pstate
2447
2448	stx	%g1, [%sp + SPOFF + CCFSZ + TF_G1]
2449	stx	%g2, [%sp + SPOFF + CCFSZ + TF_G2]
2450	stx	%g3, [%sp + SPOFF + CCFSZ + TF_G3]
2451	stx	%g4, [%sp + SPOFF + CCFSZ + TF_G4]
2452	stx	%g5, [%sp + SPOFF + CCFSZ + TF_G5]
2453	stx	%g6, [%sp + SPOFF + CCFSZ + TF_G6]
2454
2455	mov	%o0, PCPU_REG
2456	wrpr	%g0, PSTATE_KERNEL, %pstate
2457
2458	call	trap
2459	 add	%sp, CCFSZ + SPOFF, %o0
2460
2461	/* Restore trap state and globals from the trapframe. */
2461	ldx	[%sp + SPOFF + CCFSZ + TF_TSTATE], %l0
2462	ldx	[%sp + SPOFF + CCFSZ + TF_TPC], %l1
2463	ldx	[%sp + SPOFF + CCFSZ + TF_TNPC], %l2
2464	ldub	[%sp + SPOFF + CCFSZ + TF_PIL], %l3
2465
2466	ldx	[%sp + SPOFF + CCFSZ + TF_G1], %g1
2467	ldx	[%sp + SPOFF + CCFSZ + TF_G2], %g2
2468	ldx	[%sp + SPOFF + CCFSZ + TF_G3], %g3
2469	ldx	[%sp + SPOFF + CCFSZ + TF_G4], %g4
2470	ldx	[%sp + SPOFF + CCFSZ + TF_G5], %g5
2471	ldx	[%sp + SPOFF + CCFSZ + TF_G6], %g6
2472
2473	wrpr	%g0, PSTATE_ALT, %pstate
2474
2475	andn	%l0, TSTATE_CWP_MASK, %g1
2476	mov	%l1, %g2
2477	mov	%l2, %g3
2478
2479	wrpr	%l3, 0, %pil
2480
2481	restore
2482
2483	/* Back to trap level 2 to rewrite the trap registers and retry. */
2483	wrpr	%g0, 2, %tl
2484
2485	rdpr	%cwp, %g4
2486	wrpr	%g1, %g4, %tstate
2487	wrpr	%g2, 0, %tpc
2488	wrpr	%g3, 0, %tnpc
2489
2490#if KTR_COMPILE & KTR_TRAP
2491	CATR(KTR_TRAP, "tl1_trap: td=%#lx pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
2492	    , %g2, %g3, %g4, 7, 8, 9)
2493	ldx	[PCPU(CURTHREAD)], %g3
2494	stx	%g3, [%g2 + KTR_PARM1]
2495	rdpr	%pil, %g3
2496	stx	%g3, [%g2 + KTR_PARM2]
2497	rdpr	%tstate, %g3
2498	stx	%g3, [%g2 + KTR_PARM3]
2499	rdpr	%tpc, %g3
2500	stx	%g3, [%g2 + KTR_PARM4]
2501	stx	%sp, [%g2 + KTR_PARM5]
25029:
2503#endif
2504
2505	retry
2506END(tl1_trap)
2507
ENTRY(tl1_intr)
	sub	%sp, TF_SIZEOF, %sp

	/*
	 * Capture the trap state registers before anything can clobber
	 * them; %o1 carries the saved %pil (stored to TF_PIL below).
	 */
	rdpr	%tstate, %l0
	rdpr	%tpc, %l1
	rdpr	%tnpc, %l2
	mov	%o1, %l3

#if KTR_COMPILE & KTR_INTR
	/* Optional trace record; "9" is the skip label used by CATR. */
	CATR(KTR_INTR, "tl1_intr: td=%p type=%#lx pil=%#lx pc=%#lx sp=%#lx"
	    , %g1, %g2, %g3, 7, 8, 9)
	ldx	[PCPU(CURTHREAD)], %g2
	stx	%g2, [%g1 + KTR_PARM1]
	andn	%o0, T_KERNEL, %g2
	stx	%g2, [%g1 + KTR_PARM2]
	stx	%o1, [%g1 + KTR_PARM3]
	stx	%l1, [%g1 + KTR_PARM4]
	stx	%i6, [%g1 + KTR_PARM5]
9:
#endif

	/* Drop back to trap level 1 now that the trap state is in hand. */
	wrpr	%g0, 1, %tl

	/* Build the trap frame: saved trap state... */
	stx	%l0, [%sp + SPOFF + CCFSZ + TF_TSTATE]
	stx	%l1, [%sp + SPOFF + CCFSZ + TF_TPC]
	stx	%l2, [%sp + SPOFF + CCFSZ + TF_TNPC]

	/* ...a fixed kernel-interrupt type, pil, and the level from %o2... */
	mov	T_INTERRUPT | T_KERNEL, %o0
	stw	%o0, [%sp + SPOFF + CCFSZ + TF_TYPE]
	stb	%o1, [%sp + SPOFF + CCFSZ + TF_PIL]
	stw	%o2, [%sp + SPOFF + CCFSZ + TF_LEVEL]

	/*
	 * ...and the interrupted frame's %o6/%o7, visible here as %i6/%i7
	 * (presumably the entry path saved a window — matched by the
	 * restore below; confirm against the tl1 trap table stubs).
	 */
	stx	%i6, [%sp + SPOFF + CCFSZ + TF_O6]
	stx	%i7, [%sp + SPOFF + CCFSZ + TF_O7]

	/*
	 * Switch to the normal globals to save %g1-%g6 into the frame.
	 * The per-CPU pointer lives in a global register, so carry it
	 * across the pstate switch in %o0 and reinstall it afterwards.
	 */
	mov	PCPU_REG, %o0
	wrpr	%g0, PSTATE_NORMAL, %pstate

	stx	%g1, [%sp + SPOFF + CCFSZ + TF_G1]
	stx	%g2, [%sp + SPOFF + CCFSZ + TF_G2]
	stx	%g3, [%sp + SPOFF + CCFSZ + TF_G3]
	stx	%g4, [%sp + SPOFF + CCFSZ + TF_G4]
	stx	%g5, [%sp + SPOFF + CCFSZ + TF_G5]
	stx	%g6, [%sp + SPOFF + CCFSZ + TF_G6]

	mov	%o0, PCPU_REG
	wrpr	%g0, PSTATE_KERNEL, %pstate

	/* Bump the interrupt statistics counter (cnt + V_INTR). */
	SET(cnt+V_INTR, %l5, %l4)
	ATOMIC_INC_INT(%l4, %l5, %l6)

	/*
	 * Dispatch: intr_handlers[level](tf), indexing the table by the
	 * interrupt level in %o2; the frame pointer argument is set up
	 * in the delay slot.
	 */
	SET(intr_handlers, %l5, %l4)
	sllx	%o2, IH_SHIFT, %l5
	ldx	[%l4 + %l5], %l5
	call	%l5
	 add	%sp, CCFSZ + SPOFF, %o0

	/*
	 * Restore the globals.  Unlike tl1_trap, the trap state is not
	 * reloaded from the frame; the values saved in %l0-%l3 (which
	 * survive the call in this window) are used directly.
	 */
	ldx	[%sp + SPOFF + CCFSZ + TF_G1], %g1
	ldx	[%sp + SPOFF + CCFSZ + TF_G2], %g2
	ldx	[%sp + SPOFF + CCFSZ + TF_G3], %g3
	ldx	[%sp + SPOFF + CCFSZ + TF_G4], %g4
	ldx	[%sp + SPOFF + CCFSZ + TF_G5], %g5
	ldx	[%sp + SPOFF + CCFSZ + TF_G6], %g6

	/* The return path runs on the alternate globals. */
	wrpr	%g0, PSTATE_ALT, %pstate

	/*
	 * Stage the trap state in globals so it survives the restore.
	 * The saved CWP field is cleared; it is refreshed from the
	 * current %cwp after the window restore below.
	 */
	andn	%l0, TSTATE_CWP_MASK, %g1
	mov	%l1, %g2
	mov	%l2, %g3
	wrpr	%l3, 0, %pil

	restore

	/* Re-raise the trap level and rebuild the trap state for retry. */
	wrpr	%g0, 2, %tl

	rdpr	%cwp, %g4
	wrpr	%g1, %g4, %tstate
	wrpr	%g2, 0, %tpc
	wrpr	%g3, 0, %tnpc

#if KTR_COMPILE & KTR_INTR
	CATR(KTR_INTR, "tl1_intr: td=%#lx pil=%#lx ts=%#lx pc=%#lx sp=%#lx"
	    , %g2, %g3, %g4, 7, 8, 9)
	ldx	[PCPU(CURTHREAD)], %g3
	stx	%g3, [%g2 + KTR_PARM1]
	rdpr	%pil, %g3
	stx	%g3, [%g2 + KTR_PARM2]
	rdpr	%tstate, %g3
	stx	%g3, [%g2 + KTR_PARM3]
	rdpr	%tpc, %g3
	stx	%g3, [%g2 + KTR_PARM4]
	stx	%sp, [%g2 + KTR_PARM5]
9:
#endif

	/* Resume the interrupted instruction at %tpc/%tnpc. */
	retry
END(tl1_intr)
2605
2606/*
2607 * Freshly forked processes come here when switched to for the first time.
2608 * The arguments to fork_exit() have been setup in the locals, we must move
2609 * them to the outs.
2610 */
ENTRY(fork_trampoline)
#if KTR_COMPILE & KTR_PROC
	/* Optional trace record; "9" is the skip label used by CATR. */
	CATR(KTR_PROC, "fork_trampoline: td=%p (%s) cwp=%#lx"
	    , %g1, %g2, %g3, 7, 8, 9)
	ldx	[PCPU(CURTHREAD)], %g2
	stx	%g2, [%g1 + KTR_PARM1]
	ldx	[%g2 + TD_PROC], %g2
	add	%g2, P_COMM, %g2
	stx	%g2, [%g1 + KTR_PARM2]
	rdpr	%cwp, %g2
	stx	%g2, [%g1 + KTR_PARM3]
9:
#endif
	/*
	 * Move fork_exit()'s three arguments from the locals (set up at
	 * fork time) into the outs; the last move rides the delay slot.
	 */
	mov	%l0, %o0
	mov	%l1, %o1
	call	fork_exit
	 mov	%l2, %o2
	/*
	 * Head out through the normal return-to-user path.  NOTE(review):
	 * the ",a" annul bit makes the following nop dead code.
	 */
	b,a	%xcc, tl0_ret
	 nop
END(fork_trampoline)
2631