/*	$NetBSD: vector.S,v 1.38.8.1 2012/04/23 16:34:16 riz Exp $	*/

/*-
 * Copyright (c) 1998, 2007, 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Charles M. Hannum and by Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Copyright (c) 2001 Wasabi Systems, Inc.
 * All rights reserved.
 *
 * Written by Frank van der Linden for Wasabi Systems, Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed for the NetBSD Project by
 *      Wasabi Systems, Inc.
 * 4. The name of Wasabi Systems, Inc. may not be used to endorse
 *    or promote products derived from this software without specific prior
 *    written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL WASABI SYSTEMS, INC
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>

#include "opt_ddb.h"
#include "opt_multiprocessor.h"
#include "opt_xen.h"
#include "opt_dtrace.h"

#define ALIGN_TEXT	.align 16,0x90

#include <machine/i8259.h>
#include <machine/i82093reg.h>
#include <machine/i82489reg.h>
#include <machine/frameasm.h>
#include <machine/segments.h>
#include <machine/trap.h>
#include <machine/specialreg.h>

#include "ioapic.h"
#include "lapic.h"
#include "assym.h"

/*****************************************************************************/

/*
 * Trap and fault vector routines
 *
 * On exit from the kernel to user mode, we always need to check for ASTs.  In
 * addition, we need to do this atomically; otherwise an interrupt may occur
 * which causes an AST, but it won't get processed until the next kernel entry
 * (possibly the next clock tick).  Thus, we disable interrupt before checking,
 * and only enable them again on the final `iret' or before calling the AST
 * handler.
 */

/*****************************************************************************/

/*
 * Under Xen the hypervisor pushes %rcx and %r11 on top of the normal
 * trap frame; PRE_TRAP recovers them and pops the extra 16 bytes so the
 * rest of the trap macros see a native-looking frame.
 */
#ifdef	XEN
#define	PRE_TRAP	movq (%rsp),%rcx ; movq 8(%rsp),%r11 ; addq $0x10,%rsp
#else
#define	PRE_TRAP
#endif

/*
 * TRAP(): hardware pushed an error code; ZTRAP(): push a zero in its
 * place so all trap frames have the same layout.  The _NJ variants do
 * not jump to alltraps, for handlers that need custom entry code.
 */
#define	TRAP_NJ(a)	PRE_TRAP ; pushq $(a)
#define	ZTRAP_NJ(a)	PRE_TRAP ; pushq $0 ; pushq $(a)
#define	TRAP(a)		TRAP_NJ(a) ; jmp _C_LABEL(alltraps)
#define	ZTRAP(a)	ZTRAP_NJ(a) ; jmp _C_LABEL(alltraps)

	.text
11565942Sgibbs
IDTVEC(trap00)		/* vector 0: divide fault */
	ZTRAP(T_DIVIDE)

IDTVEC(trap01)		/* vector 1: debug/trace trap */
	ZTRAP(T_TRCTRAP)
12165942Sgibbs
/*
 * Vector 2: non-maskable interrupt.  An NMI can arrive while we are in
 * kernel mode but before the usual entry code has executed 'swapgs', so
 * INTRENTRY cannot be used.  Instead we read MSR_GSBASE: if it holds a
 * value below the kernel VA range, %gs base is still the user's and we
 * must swapgs around the call to trap() ourselves.
 */
IDTVEC(trap02)
#if defined(XEN)
	ZTRAP(T_NMI)
#else /* defined(XEN) */
	pushq $0
	pushq $T_NMI
	subq	$TF_REGSIZE,%rsp
	INTR_SAVE_GPRS
	movl	$MSR_GSBASE,%ecx
	rdmsr
	cmpl	$VM_MIN_KERNEL_ADDRESS_HIGH32,%edx
	jae	1f			/* kernel GS base already loaded */
	swapgs				/* came from user GS base */
	movw	%gs,TF_GS(%rsp)
	movw	%fs,TF_FS(%rsp)
	movw	%es,TF_ES(%rsp)
	movw	%ds,TF_DS(%rsp)
	movq	%rsp,%rdi
	incq	CPUVAR(NTRAP)
	call	_C_LABEL(trap)
	movw	TF_ES(%rsp),%es
	movw	TF_DS(%rsp),%ds
	swapgs				/* back to user GS base for iretq */
	jmp	2f
1:
	movq	%rsp,%rdi
	incq	CPUVAR(NTRAP)
	call	_C_LABEL(trap)
2:
	INTR_RESTORE_GPRS
	addq	$TF_REGSIZE+16,%rsp	/* pop GPRs + trapno/err */
	iretq
#endif /* defined(XEN) */
155114621Sgibbs
/*
 * Vector 3: breakpoint (int3).  With KDTRACE_HOOKS, int3 is also how
 * DTrace FBT probes fire, so the registered DTrace hook gets first look
 * before the trap is handed to the normal breakpoint path (calltrap).
 */
IDTVEC(trap03)
#ifndef KDTRACE_HOOKS
	ZTRAP(T_BPTFLT)
#else
	ZTRAP_NJ(T_BPTFLT)
	INTRENTRY
  	STI(si)
	/*
	 * DTrace Function Boundary Trace (fbt) probes are triggered
	 * by int3 (0xcc).
	 */
	/* Check if there is no DTrace hook registered. */
	cmpq	$0,dtrace_invop_jump_addr
	je	calltrap

	/*
	 * Set our jump address for the jump back in the event that
	 * the exception wasn't caused by DTrace at all.
	 */
	/* XXX: This doesn't look right for SMP - unless it is a
	 * constant - so why set it everytime. (dsl) */
	movq	$calltrap, dtrace_invop_calltrap_addr(%rip)

	/* Jump to the code hooked in by DTrace. */
	movq	dtrace_invop_jump_addr, %rax
	jmpq	*dtrace_invop_jump_addr

	/* Hook storage: filled in by the dtrace module at attach time. */
	.bss
	.globl	dtrace_invop_jump_addr
	.align	8
	.type	dtrace_invop_jump_addr, @object
	.size	dtrace_invop_jump_addr, 8
dtrace_invop_jump_addr:
	.zero	8
	.globl	dtrace_invop_calltrap_addr
	.align	8
	.type	dtrace_invop_calltrap_addr, @object
	.size	dtrace_invop_calltrap_addr, 8
dtrace_invop_calltrap_addr:
	.zero	8
	.text
#endif
19865942Sgibbs
IDTVEC(trap04)		/* vector 4: overflow (into) */
	ZTRAP(T_OFLOW)

IDTVEC(trap05)		/* vector 5: bound-range exceeded */
	ZTRAP(T_BOUND)

IDTVEC(trap06)		/* vector 6: invalid opcode */
	ZTRAP(T_PRIVINFLT)
20765942Sgibbs
/*
 * Vector 7: device not available (FPU).  Handled entirely in kernel by
 * fpudna(); no call to trap(), then fall into the common AST check.
 */
IDTVEC(trap07)
	ZTRAP_NJ(T_ASTFLT)
	INTRENTRY
#ifdef DIAGNOSTIC
	movl	CPUVAR(ILEVEL),%ebx	/* record entry IPL for exit check */
#endif /* DIAGNOSTIC */
	movq	CPUVAR(SELF),%rdi
	call	_C_LABEL(fpudna)
	jmp	.Lalltraps_checkusr
21765942Sgibbs
IDTVEC(trap08)		/* vector 8: double fault (error code pushed) */
	TRAP(T_DOUBLEFLT)

IDTVEC(trap09)		/* vector 9: coprocessor segment overrun */
	ZTRAP(T_FPOPFLT)

IDTVEC(trap0a)		/* vector 10: invalid TSS (error code pushed) */
	TRAP(T_TSSFLT)
22665942Sgibbs
#ifdef XEN
/*
 * I don't believe XEN generates in-kernel traps for the
 * equivalent of iret, if it does this code would be needed
 * in order to copy the user segment registers into the fault frame.
 */
#define check_swapgs alltraps
#endif

IDTVEC(trap0b)		/* #NP() Segment not present */
	TRAP_NJ(T_SEGNPFLT)
	jmp	check_swapgs

IDTVEC(trap0c)		/* #SS() Stack exception */
	TRAP_NJ(T_STKFLT)
	jmp	check_swapgs

IDTVEC(trap0d)		/* #GP() General protection */
	TRAP_NJ(T_PROTFLT)
#ifdef check_swapgs
	jmp	check_swapgs
#else
/* We need to worry about traps while the kernel %gs_base isn't loaded.
 * These are either loads to %gs (only 32bit) or faults on iret during
 * return to user. */
check_swapgs:
	INTRENTRY_L(3f,1:)
2:	sti
	jmp	calltrap
3:
	/* Trap in kernel mode. */
	/* If faulting instruction is 'iret' we may need to do a 'swapgs'. */
	movq	TF_RIP(%rsp),%rax
	cmpw	$0xcf48,(%rax)		/* Faulting instruction is iretq ? */
	jne	5f			/* Jump if not */
	movq	TF_RSP(%rsp),%rax	/* Must read %rsp, may be a pad word */
	testb	$SEL_UPL,8(%rax)	/* Check %cs of outer iret frame */
	je	2b			/* jump if iret was to kernel  */
	jmp	1b			/* to user - must restore %gs */
5:
	/* Not 'iret', all moves to %gs also need a swapgs */
	movw	(%rax),%ax
	andb	$070,%ah		/* mask mod/rm from mod/reg/rm */
	cmpw	$0x8e+050*256,%ax	/* Any move to %gs (reg 5) */
	jne	2b			/* No - normal kernel fault */
	jmp	1b			/* Yes - restore %gs */
#endif
274114621Sgibbs
IDTVEC(trap0e)		/* vector 14: page fault (error code pushed) */
	TRAP(T_PAGEFLT)

/*
 * Vector 15: spurious interrupt / reserved trap.  Nothing to do except
 * run the common return-to-user AST check.
 */
IDTVEC(intrspurious)
IDTVEC(trap0f)
	ZTRAP_NJ(T_ASTFLT)
	INTRENTRY
#ifdef DIAGNOSTIC
	movl	CPUVAR(ILEVEL),%ebx	/* record entry IPL for exit check */
#endif /* DIAGNOSTIC */
	jmp	.Lalltraps_checkusr
286114621Sgibbs
/*
 * Vector 16: x87 FP exception.  Faults from user mode go to fputrap();
 * faults from kernel mode fall through to the generic trap path.
 * trap13 (SSE/XMM, #XF) shares this tail via .Ldo_fputrap.
 */
IDTVEC(trap10)
	ZTRAP_NJ(T_ARITHTRAP)
.Ldo_fputrap:
	INTRENTRY
#ifdef DIAGNOSTIC
	movl	CPUVAR(ILEVEL),%ebx	/* record entry IPL for exit check */
#endif /* DIAGNOSTIC */
	testb	$SEL_RPL,TF_CS(%rsp)	/* trap from user mode? */
	jz	1f
	movq	%rsp,%rdi
	call	_C_LABEL(fputrap)
	jmp	.Lalltraps_checkusr
1:
  	STI(si)
	jmp	calltrap
IDTVEC(trap11)		/* vector 17: alignment check (error code pushed) */
	TRAP(T_ALIGNFLT)

IDTVEC(trap12)		/* vector 18: machine check */
	ZTRAP(T_MCA)

IDTVEC(trap13)		/* vector 19: SIMD FP exception */
	ZTRAP_NJ(T_XMM)
	jmp	.Ldo_fputrap
31265942Sgibbs
/* Vectors 20-31: reserved by the architecture; all share one stub. */
IDTVEC(trap14)
IDTVEC(trap15)
IDTVEC(trap16)
IDTVEC(trap17)
IDTVEC(trap18)
IDTVEC(trap19)
IDTVEC(trap1a)
IDTVEC(trap1b)
IDTVEC(trap1c)
IDTVEC(trap1d)
IDTVEC(trap1e)
IDTVEC(trap1f)
	/* 20 - 31 reserved for future exp */
	ZTRAP(T_RESERVED)
32765942Sgibbs
/* Table of the 32 exception entry points, indexed by vector number. */
IDTVEC(exceptions)
	.quad	_C_LABEL(Xtrap00), _C_LABEL(Xtrap01)
	.quad	_C_LABEL(Xtrap02), _C_LABEL(Xtrap03)
	.quad	_C_LABEL(Xtrap04), _C_LABEL(Xtrap05)
	.quad	_C_LABEL(Xtrap06), _C_LABEL(Xtrap07)
	.quad	_C_LABEL(Xtrap08), _C_LABEL(Xtrap09)
	.quad	_C_LABEL(Xtrap0a), _C_LABEL(Xtrap0b)
	.quad	_C_LABEL(Xtrap0c), _C_LABEL(Xtrap0d)
	.quad	_C_LABEL(Xtrap0e), _C_LABEL(Xtrap0f)
	.quad	_C_LABEL(Xtrap10), _C_LABEL(Xtrap11)
	.quad	_C_LABEL(Xtrap12), _C_LABEL(Xtrap13)
	.quad	_C_LABEL(Xtrap14), _C_LABEL(Xtrap15)
	.quad	_C_LABEL(Xtrap16), _C_LABEL(Xtrap17)
	.quad	_C_LABEL(Xtrap18), _C_LABEL(Xtrap19)
	.quad	_C_LABEL(Xtrap1a), _C_LABEL(Xtrap1b)
	.quad	_C_LABEL(Xtrap1c), _C_LABEL(Xtrap1d)
	.quad	_C_LABEL(Xtrap1e), _C_LABEL(Xtrap1f)
34565942Sgibbs
/*
 * trap() calls here when it detects a fault in INTRFASTEXIT (loading the
 * segment registers or during the iret itself).
 * The address of the (possibly reconstructed) user trap frame is
 * passed as an argument.
 * Typically the code will have raised a SIGSEGV which will be actioned
 * by the code below.
 */
_C_LABEL(trap_return_fault_return):	.globl	trap_return_fault_return
	mov	%rdi,%rsp		/* frame for user return */
#ifdef DIAGNOSTIC
	/* We can't recover the saved %rbx, so suppress warning */
	movl	CPUVAR(ILEVEL),%ebx
#endif /* DIAGNOSTIC */
	jmp	.Lalltraps_checkusr
36165942Sgibbs
/*
 * All traps go through here. Call the generic trap handler, and
 * check for ASTs afterwards.
 *
 * %ebx (callee-saved) holds the IPL recorded at entry; under DIAGNOSTIC
 * it is compared against the current IPL at exit to catch handlers that
 * return at the wrong spl.
 */
NENTRY(alltraps)
	INTRENTRY
  	STI(si)

calltrap:
#ifdef DIAGNOSTIC
	movl	CPUVAR(ILEVEL),%ebx
#endif /* DIAGNOSTIC */
	movq	%rsp,%rdi		/* arg: trap frame */
	incq	CPUVAR(NTRAP)
	call	_C_LABEL(trap)
.Lalltraps_checkusr:
	testb	$SEL_RPL,TF_CS(%rsp)	/* returning to kernel? */
	jz	6f
.Lalltraps_checkast:
	movq	CPUVAR(CURLWP),%r14
	/* Check for ASTs on exit to user mode. */
  	CLI(si)
	CHECK_ASTPENDING(%r14)
	je	3f
	CLEAR_ASTPENDING(%r14)
  	STI(si)
	movl	$T_ASTFLT,TF_TRAPNO(%rsp)
	movq	%rsp,%rdi
	incq	CPUVAR(NTRAP)
	call	_C_LABEL(trap)
	jmp	.Lalltraps_checkast	/* re-check ASTs */
3:	CHECK_DEFERRED_SWITCH
	jnz	9f
#ifndef DIAGNOSTIC
6:	INTRFASTEXIT
#else /* DIAGNOSTIC */
6:	cmpl	CPUVAR(ILEVEL),%ebx	/* spl unchanged since entry? */
	jne	3f
	INTRFASTEXIT
3:  	STI(si)
	movabsq	$4f,%rdi
	movl	CPUVAR(ILEVEL),%esi
	movl	%ebx,%edx
	xorq	%rax,%rax		/* no vector args to printf */
	call	_C_LABEL(printf)
	movl	%ebx,%edi
	call	_C_LABEL(spllower)
	jmp	.Lalltraps_checkast
4:	.asciz	"WARNING: SPL NOT LOWERED ON TRAP EXIT %x %x\n"
#endif /* DIAGNOSTIC */
9:	STI(si)
	call	_C_LABEL(do_pmap_load)
	jmp	.Lalltraps_checkast	/* re-check ASTs */
415
416
#define __HAVE_GENERIC_SOFT_INTERRUPTS	/* XXX */


/*
 * Macros for interrupt entry, call to handler, and exit.
 *
 * XXX
 * The interrupt frame is set up to look like a trap frame.  This may be a
 * waste.  The only handler which needs a frame is the clock handler, and it
 * only needs a few bits.  Xdoreti() needs a trap frame for handling ASTs, but
 * it could easily convert the frame on demand.
 *
 * The direct costs of setting up a trap frame are two pushq's (error code and
 * trap number), an addl to get rid of these, and pushing and popping the
 * callee-saved registers %esi, %edi, %ebx, and %ebp twice.
 *
 * If the interrupt frame is made more flexible,  INTR can push %eax first and
 * decide the ipending case with less overhead, e.g., by avoiding loading the
 * segment registers.
 *
 */

/* XXX See comment in locore.s */
#ifdef __ELF__
#define	XINTR(name,num)		Xintr_ ## name ## num
#else
#define	XINTR(name,num)		_Xintr_ ## name ## num
#endif
445
#if NLAPIC > 0
#ifdef MULTIPROCESSOR
/* Software recursion entry for a pending IPI: fake a hardware frame. */
IDTVEC(recurse_lapic_ipi)
	INTR_RECURSE_HWFRAME
	pushq	$0
	pushq	$T_ASTFLT
	INTRENTRY
	jmp	1f

/* Hardware entry for an inter-processor interrupt. */
IDTVEC(intr_lapic_ipi)
	pushq	$0
	pushq	$T_ASTFLT
	INTRENTRY
	movl	$0,_C_LABEL(local_apic)+LAPIC_EOI
	movl	CPUVAR(ILEVEL),%ebx
	cmpl	$IPL_HIGH,%ebx
	jae	2f			/* masked: mark pending and leave */
IDTVEC(resume_lapic_ipi)
1:
	incl	CPUVAR(IDEPTH)
	movl	$IPL_HIGH,CPUVAR(ILEVEL)
	sti
	pushq	%rbx			/* saved IPL, restored by Xdoreti */
	call	_C_LABEL(x86_ipi_handler)
	jmp	_C_LABEL(Xdoreti)
2:
	orl	$(1 << LIR_IPI),CPUVAR(IPENDING)
	INTRFASTEXIT

#if defined(DDB)
/* DDB IPI: raise TPR to mask interrupts while the debugger runs. */
IDTVEC(intrddb)
1:
	pushq	$0
	pushq	$T_BPTFLT
	INTRENTRY
	movl	$0xf,%eax
	movq	%rax,%cr8
	movl	$0,_C_LABEL(local_apic)+LAPIC_EOI
	sti
	call	_C_LABEL(ddb_ipi)
	xorl	%eax,%eax
	movq	%rax,%cr8
	INTRFASTEXIT
#endif /* DDB */
#endif /* MULTIPROCESSOR */

	/*
	 * Interrupt from the local APIC timer.
	 */
IDTVEC(recurse_lapic_ltimer)
	INTR_RECURSE_HWFRAME
	pushq	$0
	pushq	$T_ASTFLT
	INTRENTRY
	jmp	1f

IDTVEC(intr_lapic_ltimer)
	pushq	$0
	pushq	$T_ASTFLT
	INTRENTRY
	movl	$0,_C_LABEL(local_apic)+LAPIC_EOI
	movl	CPUVAR(ILEVEL),%ebx
	cmpl	$IPL_CLOCK,%ebx
	jae	2f			/* masked: mark pending and leave */
IDTVEC(resume_lapic_ltimer)
1:
	incl	CPUVAR(IDEPTH)
	movl	$IPL_CLOCK,CPUVAR(ILEVEL)
	sti
	pushq	%rbx			/* saved IPL, restored by Xdoreti */
	movq	%rsp,%rsi
	xorq	%rdi,%rdi
	call	_C_LABEL(lapic_clockintr)
	jmp	_C_LABEL(Xdoreti)
2:
	orl	$(1 << LIR_TIMER),CPUVAR(IPENDING)
	INTRFASTEXIT
#endif /* NLAPIC > 0 */
524
#ifndef XEN
/*
 * TLB shootdown handler.
 */
IDTVEC(intr_lapic_tlb)
	pushq	$0
	pushq	$T_ASTFLT
	INTRENTRY
	movl	$0, _C_LABEL(local_apic)+LAPIC_EOI
	callq	_C_LABEL(pmap_tlb_intr)
	INTRFASTEXIT

#endif /* !XEN */
538
#define voidop(num)

#ifndef XEN

/*
 * This macro defines the generic stub code. Its arguments modifiy it
 * for specific PICs.
 *
 * Three entry points per source: recurse_ (software replay of a pending
 * interrupt), resume_ (replay with frame already built), and intr_
 * (hardware entry).  If the current IPL masks the source, the interrupt
 * is marked in IPENDING and delivered later by Xdoreti.
 */

#define	INTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
IDTVEC(recurse_ ## name ## num)						;\
	INTR_RECURSE_HWFRAME						;\
	subq	$8,%rsp							;\
	pushq	$T_ASTFLT		/* trap # for doing ASTs */	;\
	INTRENTRY							;\
IDTVEC(resume_ ## name ## num)						\
	movq	$IREENT_MAGIC,TF_ERR(%rsp)				;\
	movl	%ebx,%r13d						;\
	movq	CPUVAR(ISOURCES) + (num) * 8, %r14			;\
	movl	IS_MAXLEVEL(%r14),%ebx					;\
	jmp	1f							;\
IDTVEC(intr_ ## name ## num)						;\
	pushq	$0			/* dummy error code */		;\
	pushq	$T_ASTFLT		/* trap # for doing ASTs */	;\
	INTRENTRY							;\
	movq	CPUVAR(ISOURCES) + (num) * 8, %r14			;\
	mask(num)			/* mask it in hardware */	;\
	early_ack(num)			/* and allow other intrs */	;\
	testq	%r14,%r14						;\
	jz	9f			/* stray */			;\
	movl	IS_MAXLEVEL(%r14),%ebx					;\
	movl	CPUVAR(ILEVEL),%r13d					;\
	cmpl	%ebx,%r13d						;\
	jae	10f			/* currently masked; hold it */	;\
	incq	CPUVAR(NINTR)		/* statistical info */		;\
	incq	IS_EVCNT(%r14)						;\
1:									\
	pushq	%r13							;\
	movl	%ebx,CPUVAR(ILEVEL)					;\
	sti								;\
	incl	CPUVAR(IDEPTH)						;\
	movq	IS_HANDLERS(%r14),%rbx					;\
6:									\
	movl	IH_LEVEL(%rbx),%r12d					;\
	cmpl	%r13d,%r12d						;\
	jle	7f							;\
	movq	%rsp,%rsi						;\
	movq	IH_ARG(%rbx),%rdi					;\
	movl	%r12d,CPUVAR(ILEVEL)					;\
	call	*IH_FUN(%rbx)		/* call it */			;\
	movq	IH_NEXT(%rbx),%rbx	/* next handler in chain */	;\
	testq	%rbx,%rbx						;\
	jnz	6b							;\
5:									\
	cli								;\
	unmask(num)			/* unmask it in hardware */	;\
	late_ack(num)							;\
	sti								;\
	jmp	_C_LABEL(Xdoreti)	/* lower spl and do ASTs */	;\
7:									\
	cli								;\
	orl     $(1 << num),CPUVAR(IPENDING)				;\
	level_mask(num)							;\
	late_ack(num)							;\
	sti								;\
	jmp	_C_LABEL(Xdoreti)	/* lower spl and do ASTs */	;\
10:									\
	cli								;\
	orl     $(1 << num),CPUVAR(IPENDING)				;\
	level_mask(num)							;\
	late_ack(num)							;\
	INTRFASTEXIT							;\
9:									\
	unmask(num)							;\
	late_ack(num)							;\
	INTRFASTEXIT
615
/* i8259 legacy stubs: IRQs 0-7 on the master PIC... */
#define ICUADDR IO_ICU1

INTRSTUB(legacy,0,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,1,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,2,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,3,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,4,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,5,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,6,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,7,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
/* ... and IRQs 8-15 on the slave PIC. */
#undef ICUADDR
#define ICUADDR IO_ICU2

INTRSTUB(legacy,8,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,9,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,10,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,11,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,12,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,13,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,14,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,15,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
653
#if NIOAPIC > 0

/* I/O APIC edge-triggered stubs: ack only, no mask/unmask needed. */
INTRSTUB(ioapic_edge,0,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,1,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,2,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,3,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,4,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,5,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,6,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,7,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,8,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,9,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,10,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,11,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,12,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,13,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,14,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,15,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,16,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,17,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,18,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,19,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,20,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,21,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,22,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,23,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,24,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,25,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,26,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,27,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,28,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,29,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,30,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,31,voidop,ioapic_asm_ack,voidop,voidop,voidop)

/* I/O APIC level-triggered stubs: masked while held pending. */
INTRSTUB(ioapic_level,0,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,1,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,2,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,3,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,4,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,5,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,6,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,7,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,8,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,9,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,10,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,11,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,12,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,13,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,14,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,15,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,16,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,17,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,18,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,19,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,20,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,21,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,22,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,23,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,24,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,25,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,26,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,27,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,28,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,29,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,30,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,31,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)

#endif
723
/* Per-IRQ stub table for the i8259: {intr, recurse, resume} triples. */
.globl _C_LABEL(i8259_stubs)
_C_LABEL(i8259_stubs):
	.quad _C_LABEL(Xintr_legacy0), _C_LABEL(Xrecurse_legacy0)
	.quad _C_LABEL(Xresume_legacy0)
	.quad _C_LABEL(Xintr_legacy1), _C_LABEL(Xrecurse_legacy1)
	.quad _C_LABEL(Xresume_legacy1)
	.quad _C_LABEL(Xintr_legacy2), _C_LABEL(Xrecurse_legacy2)
	.quad _C_LABEL(Xresume_legacy2)
	.quad _C_LABEL(Xintr_legacy3), _C_LABEL(Xrecurse_legacy3)
	.quad _C_LABEL(Xresume_legacy3)
	.quad _C_LABEL(Xintr_legacy4), _C_LABEL(Xrecurse_legacy4)
	.quad _C_LABEL(Xresume_legacy4)
	.quad _C_LABEL(Xintr_legacy5), _C_LABEL(Xrecurse_legacy5)
	.quad _C_LABEL(Xresume_legacy5)
	.quad _C_LABEL(Xintr_legacy6), _C_LABEL(Xrecurse_legacy6)
	.quad _C_LABEL(Xresume_legacy6)
	.quad _C_LABEL(Xintr_legacy7), _C_LABEL(Xrecurse_legacy7)
	.quad _C_LABEL(Xresume_legacy7)
	.quad _C_LABEL(Xintr_legacy8), _C_LABEL(Xrecurse_legacy8)
	.quad _C_LABEL(Xresume_legacy8)
	.quad _C_LABEL(Xintr_legacy9), _C_LABEL(Xrecurse_legacy9)
	.quad _C_LABEL(Xresume_legacy9)
	.quad _C_LABEL(Xintr_legacy10), _C_LABEL(Xrecurse_legacy10)
	.quad _C_LABEL(Xresume_legacy10)
	.quad _C_LABEL(Xintr_legacy11), _C_LABEL(Xrecurse_legacy11)
	.quad _C_LABEL(Xresume_legacy11)
	.quad _C_LABEL(Xintr_legacy12), _C_LABEL(Xrecurse_legacy12)
	.quad _C_LABEL(Xresume_legacy12)
	.quad _C_LABEL(Xintr_legacy13), _C_LABEL(Xrecurse_legacy13)
	.quad _C_LABEL(Xresume_legacy13)
	.quad _C_LABEL(Xintr_legacy14), _C_LABEL(Xrecurse_legacy14)
	.quad _C_LABEL(Xresume_legacy14)
	.quad _C_LABEL(Xintr_legacy15), _C_LABEL(Xrecurse_legacy15)
	.quad _C_LABEL(Xresume_legacy15)
758
#if NIOAPIC > 0
/*
 * Stub-pointer table for the 32 I/O APIC edge-triggered vectors.
 * Three entry points per vector, in this fixed order (per their
 * naming): Xintr_* (interrupt entry), Xrecurse_* (software replay
 * of a deferred interrupt), Xresume_* (resume when the IPL drops).
 * Consumers index this table assuming 3 pointers per vector --
 * keep order and count in sync.
 */
.globl _C_LABEL(ioapic_edge_stubs)
_C_LABEL(ioapic_edge_stubs):
	.quad _C_LABEL(Xintr_ioapic_edge0), _C_LABEL(Xrecurse_ioapic_edge0)
	.quad _C_LABEL(Xresume_ioapic_edge0)
	.quad _C_LABEL(Xintr_ioapic_edge1), _C_LABEL(Xrecurse_ioapic_edge1)
	.quad _C_LABEL(Xresume_ioapic_edge1)
	.quad _C_LABEL(Xintr_ioapic_edge2), _C_LABEL(Xrecurse_ioapic_edge2)
	.quad _C_LABEL(Xresume_ioapic_edge2)
	.quad _C_LABEL(Xintr_ioapic_edge3), _C_LABEL(Xrecurse_ioapic_edge3)
	.quad _C_LABEL(Xresume_ioapic_edge3)
	.quad _C_LABEL(Xintr_ioapic_edge4), _C_LABEL(Xrecurse_ioapic_edge4)
	.quad _C_LABEL(Xresume_ioapic_edge4)
	.quad _C_LABEL(Xintr_ioapic_edge5), _C_LABEL(Xrecurse_ioapic_edge5)
	.quad _C_LABEL(Xresume_ioapic_edge5)
	.quad _C_LABEL(Xintr_ioapic_edge6), _C_LABEL(Xrecurse_ioapic_edge6)
	.quad _C_LABEL(Xresume_ioapic_edge6)
	.quad _C_LABEL(Xintr_ioapic_edge7), _C_LABEL(Xrecurse_ioapic_edge7)
	.quad _C_LABEL(Xresume_ioapic_edge7)
	.quad _C_LABEL(Xintr_ioapic_edge8), _C_LABEL(Xrecurse_ioapic_edge8)
	.quad _C_LABEL(Xresume_ioapic_edge8)
	.quad _C_LABEL(Xintr_ioapic_edge9), _C_LABEL(Xrecurse_ioapic_edge9)
	.quad _C_LABEL(Xresume_ioapic_edge9)
	.quad _C_LABEL(Xintr_ioapic_edge10), _C_LABEL(Xrecurse_ioapic_edge10)
	.quad _C_LABEL(Xresume_ioapic_edge10)
	.quad _C_LABEL(Xintr_ioapic_edge11), _C_LABEL(Xrecurse_ioapic_edge11)
	.quad _C_LABEL(Xresume_ioapic_edge11)
	.quad _C_LABEL(Xintr_ioapic_edge12), _C_LABEL(Xrecurse_ioapic_edge12)
	.quad _C_LABEL(Xresume_ioapic_edge12)
	.quad _C_LABEL(Xintr_ioapic_edge13), _C_LABEL(Xrecurse_ioapic_edge13)
	.quad _C_LABEL(Xresume_ioapic_edge13)
	.quad _C_LABEL(Xintr_ioapic_edge14), _C_LABEL(Xrecurse_ioapic_edge14)
	.quad _C_LABEL(Xresume_ioapic_edge14)
	.quad _C_LABEL(Xintr_ioapic_edge15), _C_LABEL(Xrecurse_ioapic_edge15)
	.quad _C_LABEL(Xresume_ioapic_edge15)
	.quad _C_LABEL(Xintr_ioapic_edge16), _C_LABEL(Xrecurse_ioapic_edge16)
	.quad _C_LABEL(Xresume_ioapic_edge16)
	.quad _C_LABEL(Xintr_ioapic_edge17), _C_LABEL(Xrecurse_ioapic_edge17)
	.quad _C_LABEL(Xresume_ioapic_edge17)
	.quad _C_LABEL(Xintr_ioapic_edge18), _C_LABEL(Xrecurse_ioapic_edge18)
	.quad _C_LABEL(Xresume_ioapic_edge18)
	.quad _C_LABEL(Xintr_ioapic_edge19), _C_LABEL(Xrecurse_ioapic_edge19)
	.quad _C_LABEL(Xresume_ioapic_edge19)
	.quad _C_LABEL(Xintr_ioapic_edge20), _C_LABEL(Xrecurse_ioapic_edge20)
	.quad _C_LABEL(Xresume_ioapic_edge20)
	.quad _C_LABEL(Xintr_ioapic_edge21), _C_LABEL(Xrecurse_ioapic_edge21)
	.quad _C_LABEL(Xresume_ioapic_edge21)
	.quad _C_LABEL(Xintr_ioapic_edge22), _C_LABEL(Xrecurse_ioapic_edge22)
	.quad _C_LABEL(Xresume_ioapic_edge22)
	.quad _C_LABEL(Xintr_ioapic_edge23), _C_LABEL(Xrecurse_ioapic_edge23)
	.quad _C_LABEL(Xresume_ioapic_edge23)
	.quad _C_LABEL(Xintr_ioapic_edge24), _C_LABEL(Xrecurse_ioapic_edge24)
	.quad _C_LABEL(Xresume_ioapic_edge24)
	.quad _C_LABEL(Xintr_ioapic_edge25), _C_LABEL(Xrecurse_ioapic_edge25)
	.quad _C_LABEL(Xresume_ioapic_edge25)
	.quad _C_LABEL(Xintr_ioapic_edge26), _C_LABEL(Xrecurse_ioapic_edge26)
	.quad _C_LABEL(Xresume_ioapic_edge26)
	.quad _C_LABEL(Xintr_ioapic_edge27), _C_LABEL(Xrecurse_ioapic_edge27)
	.quad _C_LABEL(Xresume_ioapic_edge27)
	.quad _C_LABEL(Xintr_ioapic_edge28), _C_LABEL(Xrecurse_ioapic_edge28)
	.quad _C_LABEL(Xresume_ioapic_edge28)
	.quad _C_LABEL(Xintr_ioapic_edge29), _C_LABEL(Xrecurse_ioapic_edge29)
	.quad _C_LABEL(Xresume_ioapic_edge29)
	.quad _C_LABEL(Xintr_ioapic_edge30), _C_LABEL(Xrecurse_ioapic_edge30)
	.quad _C_LABEL(Xresume_ioapic_edge30)
	.quad _C_LABEL(Xintr_ioapic_edge31), _C_LABEL(Xrecurse_ioapic_edge31)
	.quad _C_LABEL(Xresume_ioapic_edge31)
826
/*
 * Stub-pointer table for the 32 I/O APIC level-triggered vectors.
 * Same layout as ioapic_edge_stubs: three pointers per vector
 * (Xintr_*, Xrecurse_*, Xresume_*) -- keep order and count in sync
 * with consumers of this table.
 */
.globl _C_LABEL(ioapic_level_stubs)
_C_LABEL(ioapic_level_stubs):
	.quad _C_LABEL(Xintr_ioapic_level0), _C_LABEL(Xrecurse_ioapic_level0)
	.quad _C_LABEL(Xresume_ioapic_level0)
	.quad _C_LABEL(Xintr_ioapic_level1), _C_LABEL(Xrecurse_ioapic_level1)
	.quad _C_LABEL(Xresume_ioapic_level1)
	.quad _C_LABEL(Xintr_ioapic_level2), _C_LABEL(Xrecurse_ioapic_level2)
	.quad _C_LABEL(Xresume_ioapic_level2)
	.quad _C_LABEL(Xintr_ioapic_level3), _C_LABEL(Xrecurse_ioapic_level3)
	.quad _C_LABEL(Xresume_ioapic_level3)
	.quad _C_LABEL(Xintr_ioapic_level4), _C_LABEL(Xrecurse_ioapic_level4)
	.quad _C_LABEL(Xresume_ioapic_level4)
	.quad _C_LABEL(Xintr_ioapic_level5), _C_LABEL(Xrecurse_ioapic_level5)
	.quad _C_LABEL(Xresume_ioapic_level5)
	.quad _C_LABEL(Xintr_ioapic_level6), _C_LABEL(Xrecurse_ioapic_level6)
	.quad _C_LABEL(Xresume_ioapic_level6)
	.quad _C_LABEL(Xintr_ioapic_level7), _C_LABEL(Xrecurse_ioapic_level7)
	.quad _C_LABEL(Xresume_ioapic_level7)
	.quad _C_LABEL(Xintr_ioapic_level8), _C_LABEL(Xrecurse_ioapic_level8)
	.quad _C_LABEL(Xresume_ioapic_level8)
	.quad _C_LABEL(Xintr_ioapic_level9), _C_LABEL(Xrecurse_ioapic_level9)
	.quad _C_LABEL(Xresume_ioapic_level9)
	.quad _C_LABEL(Xintr_ioapic_level10), _C_LABEL(Xrecurse_ioapic_level10)
	.quad _C_LABEL(Xresume_ioapic_level10)
	.quad _C_LABEL(Xintr_ioapic_level11), _C_LABEL(Xrecurse_ioapic_level11)
	.quad _C_LABEL(Xresume_ioapic_level11)
	.quad _C_LABEL(Xintr_ioapic_level12), _C_LABEL(Xrecurse_ioapic_level12)
	.quad _C_LABEL(Xresume_ioapic_level12)
	.quad _C_LABEL(Xintr_ioapic_level13), _C_LABEL(Xrecurse_ioapic_level13)
	.quad _C_LABEL(Xresume_ioapic_level13)
	.quad _C_LABEL(Xintr_ioapic_level14), _C_LABEL(Xrecurse_ioapic_level14)
	.quad _C_LABEL(Xresume_ioapic_level14)
	.quad _C_LABEL(Xintr_ioapic_level15), _C_LABEL(Xrecurse_ioapic_level15)
	.quad _C_LABEL(Xresume_ioapic_level15)
	.quad _C_LABEL(Xintr_ioapic_level16), _C_LABEL(Xrecurse_ioapic_level16)
	.quad _C_LABEL(Xresume_ioapic_level16)
	.quad _C_LABEL(Xintr_ioapic_level17), _C_LABEL(Xrecurse_ioapic_level17)
	.quad _C_LABEL(Xresume_ioapic_level17)
	.quad _C_LABEL(Xintr_ioapic_level18), _C_LABEL(Xrecurse_ioapic_level18)
	.quad _C_LABEL(Xresume_ioapic_level18)
	.quad _C_LABEL(Xintr_ioapic_level19), _C_LABEL(Xrecurse_ioapic_level19)
	.quad _C_LABEL(Xresume_ioapic_level19)
	.quad _C_LABEL(Xintr_ioapic_level20), _C_LABEL(Xrecurse_ioapic_level20)
	.quad _C_LABEL(Xresume_ioapic_level20)
	.quad _C_LABEL(Xintr_ioapic_level21), _C_LABEL(Xrecurse_ioapic_level21)
	.quad _C_LABEL(Xresume_ioapic_level21)
	.quad _C_LABEL(Xintr_ioapic_level22), _C_LABEL(Xrecurse_ioapic_level22)
	.quad _C_LABEL(Xresume_ioapic_level22)
	.quad _C_LABEL(Xintr_ioapic_level23), _C_LABEL(Xrecurse_ioapic_level23)
	.quad _C_LABEL(Xresume_ioapic_level23)
	.quad _C_LABEL(Xintr_ioapic_level24), _C_LABEL(Xrecurse_ioapic_level24)
	.quad _C_LABEL(Xresume_ioapic_level24)
	.quad _C_LABEL(Xintr_ioapic_level25), _C_LABEL(Xrecurse_ioapic_level25)
	.quad _C_LABEL(Xresume_ioapic_level25)
	.quad _C_LABEL(Xintr_ioapic_level26), _C_LABEL(Xrecurse_ioapic_level26)
	.quad _C_LABEL(Xresume_ioapic_level26)
	.quad _C_LABEL(Xintr_ioapic_level27), _C_LABEL(Xrecurse_ioapic_level27)
	.quad _C_LABEL(Xresume_ioapic_level27)
	.quad _C_LABEL(Xintr_ioapic_level28), _C_LABEL(Xrecurse_ioapic_level28)
	.quad _C_LABEL(Xresume_ioapic_level28)
	.quad _C_LABEL(Xintr_ioapic_level29), _C_LABEL(Xrecurse_ioapic_level29)
	.quad _C_LABEL(Xresume_ioapic_level29)
	.quad _C_LABEL(Xintr_ioapic_level30), _C_LABEL(Xrecurse_ioapic_level30)
	.quad _C_LABEL(Xresume_ioapic_level30)
	.quad _C_LABEL(Xintr_ioapic_level31), _C_LABEL(Xrecurse_ioapic_level31)
	.quad _C_LABEL(Xresume_ioapic_level31)
#endif
894
#else	/* XEN */
/*
 * Resume/recurse procedures for spl().
 *
 * XENINTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask)
 * generates the Xrecurse_<name><num> and Xresume_<name><num> entry
 * points used to replay an interrupt that was deferred because the
 * system was running at too high an IPL.
 *
 * Xrecurse_* fakes a hardware-interrupt frame (INTR_RECURSE_HWFRAME,
 * dummy error slot, T_ASTFLT trap number, INTRENTRY) and falls through
 * into Xresume_*, which:
 *   - tags the frame by writing IREENT_MAGIC into tf_err;
 *   - saves %ebx (apparently the previous IPL -- confirm against
 *     Xdoreti) into %r13d and pushes it;
 *   - raises ci_ilevel to `num' and bumps ci_idepth;
 *   - walks the handler chain of isource `num' (IS_HANDLERS), calling
 *     ih_fun(ih_arg, frame) for each handler via IH_IPL_NEXT links;
 *   - with events blocked, unmasks/late-acks the source, re-enables
 *     events and jumps to Xdoreti to lower the IPL and run ASTs.
 *
 * NOTE(review): the early_ack, mask and level_mask arguments are not
 * used anywhere in this macro body -- presumably retained for
 * signature parity with the native INTRSTUB; confirm before removing.
 * Local labels 1: and 5: are not referenced inside the macro itself.
 */
#define	XENINTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
IDTVEC(recurse_ ## name ## num)						;\
	INTR_RECURSE_HWFRAME						;\
	subq	$8,%rsp							;\
	pushq	$T_ASTFLT		/* trap # for doing ASTs */	;\
	INTRENTRY							;\
IDTVEC(resume_ ## name ## num)						\
	movq	$IREENT_MAGIC,TF_ERR(%rsp)				;\
	movl	%ebx,%r13d						;\
	movq	CPUVAR(ISOURCES) + (num) * 8, %r14			;\
1:									\
	pushq	%r13							;\
	movl	$num,CPUVAR(ILEVEL)					;\
	STI(si)								;\
	incl	CPUVAR(IDEPTH)						;\
	movq	IS_HANDLERS(%r14),%rbx					;\
6:									\
	movq	IH_ARG(%rbx),%rdi					;\
	movq	%rsp,%rsi						;\
	call	*IH_FUN(%rbx)		/* call it */			;\
	movq	IH_IPL_NEXT(%rbx),%rbx	/* next handler in chain */	;\
	testq	%rbx,%rbx						;\
	jnz	6b							;\
5:									\
	CLI(si)								;\
	unmask(num)			/* unmask it in hardware */	;\
	late_ack(num)							;\
	STI(si)								;\
	jmp	_C_LABEL(Xdoreti)	/* lower spl and do ASTs */	;\
926
/*
 * The `unmask' hook passed to XENINTRSTUB for Xen events: re-enables
 * event channel `num' by calling hypervisor_enable_ipl(num).
 * Clobbers %rdi (first argument register) plus whatever
 * hypervisor_enable_ipl itself clobbers (caller-saved registers).
 */
#define hypervisor_asm_unmask(num)			\
	movq $num, %rdi					;\
	call _C_LABEL(hypervisor_enable_ipl)
931
/*
 * Instantiate the recurse/resume stubs for Xen event channels 0-31.
 * Every hook is voidop except `unmask', which re-enables the event
 * at the hypervisor via hypervisor_asm_unmask.
 */
XENINTRSTUB(xenev,0,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,1,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,2,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,3,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,4,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,5,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,6,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,7,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,8,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,9,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,10,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,11,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,12,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,13,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,14,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,15,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,16,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,17,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,18,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,19,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,20,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,21,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,22,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,23,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,24,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,25,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,26,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,27,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,28,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,29,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,30,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
XENINTRSTUB(xenev,31,voidop,voidop,voidop,hypervisor_asm_unmask,voidop)
964
/*
 * Stub-pointer table for the Xen event channel stubs generated above.
 * Two pointers per event channel (0-31), in this order:
 * Xrecurse_xenev<n>, then Xresume_xenev<n>.  Keep order and count in
 * sync with consumers of this table.
 *
 * (Separator spacing normalized: entries 1-9 previously used
 * ") ,_C_LABEL" while the rest used "), _C_LABEL".)
 */
.globl _C_LABEL(xenev_stubs)
_C_LABEL(xenev_stubs):
	.quad _C_LABEL(Xrecurse_xenev0), _C_LABEL(Xresume_xenev0)
	.quad _C_LABEL(Xrecurse_xenev1), _C_LABEL(Xresume_xenev1)
	.quad _C_LABEL(Xrecurse_xenev2), _C_LABEL(Xresume_xenev2)
	.quad _C_LABEL(Xrecurse_xenev3), _C_LABEL(Xresume_xenev3)
	.quad _C_LABEL(Xrecurse_xenev4), _C_LABEL(Xresume_xenev4)
	.quad _C_LABEL(Xrecurse_xenev5), _C_LABEL(Xresume_xenev5)
	.quad _C_LABEL(Xrecurse_xenev6), _C_LABEL(Xresume_xenev6)
	.quad _C_LABEL(Xrecurse_xenev7), _C_LABEL(Xresume_xenev7)
	.quad _C_LABEL(Xrecurse_xenev8), _C_LABEL(Xresume_xenev8)
	.quad _C_LABEL(Xrecurse_xenev9), _C_LABEL(Xresume_xenev9)
	.quad _C_LABEL(Xrecurse_xenev10), _C_LABEL(Xresume_xenev10)
	.quad _C_LABEL(Xrecurse_xenev11), _C_LABEL(Xresume_xenev11)
	.quad _C_LABEL(Xrecurse_xenev12), _C_LABEL(Xresume_xenev12)
	.quad _C_LABEL(Xrecurse_xenev13), _C_LABEL(Xresume_xenev13)
	.quad _C_LABEL(Xrecurse_xenev14), _C_LABEL(Xresume_xenev14)
	.quad _C_LABEL(Xrecurse_xenev15), _C_LABEL(Xresume_xenev15)
	.quad _C_LABEL(Xrecurse_xenev16), _C_LABEL(Xresume_xenev16)
	.quad _C_LABEL(Xrecurse_xenev17), _C_LABEL(Xresume_xenev17)
	.quad _C_LABEL(Xrecurse_xenev18), _C_LABEL(Xresume_xenev18)
	.quad _C_LABEL(Xrecurse_xenev19), _C_LABEL(Xresume_xenev19)
	.quad _C_LABEL(Xrecurse_xenev20), _C_LABEL(Xresume_xenev20)
	.quad _C_LABEL(Xrecurse_xenev21), _C_LABEL(Xresume_xenev21)
	.quad _C_LABEL(Xrecurse_xenev22), _C_LABEL(Xresume_xenev22)
	.quad _C_LABEL(Xrecurse_xenev23), _C_LABEL(Xresume_xenev23)
	.quad _C_LABEL(Xrecurse_xenev24), _C_LABEL(Xresume_xenev24)
	.quad _C_LABEL(Xrecurse_xenev25), _C_LABEL(Xresume_xenev25)
	.quad _C_LABEL(Xrecurse_xenev26), _C_LABEL(Xresume_xenev26)
	.quad _C_LABEL(Xrecurse_xenev27), _C_LABEL(Xresume_xenev27)
	.quad _C_LABEL(Xrecurse_xenev28), _C_LABEL(Xresume_xenev28)
	.quad _C_LABEL(Xrecurse_xenev29), _C_LABEL(Xresume_xenev29)
	.quad _C_LABEL(Xrecurse_xenev30), _C_LABEL(Xresume_xenev30)
	.quad _C_LABEL(Xrecurse_xenev31), _C_LABEL(Xresume_xenev31)
999
# Xen callbacks

# Hypervisor event-channel upcall entry point.
# Xen enters here with %rcx and %r11 saved in two extra slots below
# the iret frame (Xen event-callback ABI -- confirm against the Xen
# x86-64 interface headers).
NENTRY(hypervisor_callback)
	movq (%rsp),%rcx	# restore Xen-saved %rcx
	movq 8(%rsp),%r11	# restore Xen-saved %r11
	addq $16,%rsp		# drop the two Xen-saved slots
	pushq	$0	# Dummy error code
	pushq	$T_ASTFLT	# fake trap number so INTRENTRY/doreti work
	INTRENTRY
	# XXX should events be re-enabled (sti) before the call?
	movq	%rsp, %rdi
	subq	$8, %rdi; /* don't forget if_ppl */
	call	do_hypervisor_callback
	testb   $SEL_RPL,TF_CS(%rsp)	# returning to user level?
	jnz	doreti_checkast		# yes: deliver ASTs first
1:
	INTRFASTEXIT
1018
# Xen failsafe callback: the hypervisor invokes this when it hits a
# fault while returning to this guest (same entry convention as
# hypervisor_callback: %rcx/%r11 saved below the iret frame).
# XXX original note: "Panic?" -- should this panic instead of
# continuing via xen_failsafe_handler?
NENTRY(failsafe_callback)
	movq (%rsp),%rcx	# restore Xen-saved %rcx
	movq 8(%rsp),%r11	# restore Xen-saved %r11
	addq $16,%rsp		# drop the two Xen-saved slots
	pushq	$0		# dummy error code
	pushq	$T_ASTFLT	# fake trap number so INTRENTRY works
	INTRENTRY
	movq	%rsp, %rdi
	subq	$8, %rdi; /* don't forget if_ppl */
	call xen_failsafe_handler
	INTRFASTEXIT
#	jmp	HYPERVISOR_iret
1032
1033#endif	/* !XEN */
1034