1/*	$NetBSD: locore.h,v 1.21 2011/02/04 03:23:33 uwe Exp $	*/
2
3/*-
4 * Copyright (c) 2002 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 *    notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 *    notice, this list of conditions and the following disclaimer in the
14 *    documentation and/or other materials provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
17 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
18 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
26 * POSSIBILITY OF SUCH DAMAGE.
27 */
28
29#ifdef _LOCORE
30
/*
 * MOV(x, r):      load the CPU-dependent value "x" into register r via a
 *                 PC-relative literal.
 * REG_SYMBOL(x):  emit the literal-pool entry that MOV's mov.l refers to,
 *                 holding a CPU register address.
 * FUNC_SYMBOL(x): same, holding a CPU-dependent function address.
 *
 * When both SH3 and SH4 are configured, the literal holds the address of
 * a run-time selected variable (_C_LABEL(__sh_x)), so MOV needs an extra
 * dereference (mov.l @r, r).  In a single-CPU kernel the literal is the
 * value itself (SH3_x / SH4_x) or the function (sh3_x / sh4_x) and no
 * indirection is needed.  The !__STDC__ variants use the traditional
 * comment-splice trick instead of ## token pasting.
 */
#ifdef __STDC__
#if defined(SH3) && defined(SH4)
#define	MOV(x, r)	mov.l .L_ ## x, r; mov.l @r, r
#define	REG_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(__sh_ ## x)
#define	FUNC_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(__sh_ ## x)
#elif defined(SH3)
#define	MOV(x, r)	mov.l .L_ ## x, r
#define	REG_SYMBOL(x)	.L_ ## x:	.long	SH3_ ## x
#define	FUNC_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(sh3_ ## x)
#elif defined(SH4)
#define	MOV(x, r)	mov.l .L_ ## x, r
#define	REG_SYMBOL(x)	.L_ ## x:	.long	SH4_ ## x
#define	FUNC_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(sh4_ ## x)
#endif /* SH3 && SH4 */
#else /* !__STDC__ */
#if defined(SH3) && defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r; mov.l @r, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#elif defined(SH3)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH3_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh3_/**/x)
#elif defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH4_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh4_/**/x)
#endif /* SH3 && SH4 */
#endif /* __STDC__ */
60
61/*
62 * BANK1 r6 contains current trapframe pointer.
63 * BANK1 r7 contains bottom address of lwp's kernel stack.
64 */
65/*
66 * __EXCEPTION_ENTRY:
67 *	+ setup stack pointer
68 *	+ save all registers to trapframe.
69 *	+ setup kernel stack.
70 *	+ change bank from 1 to 0
71 *	+ NB: interrupt vector "knows" that r0_bank1 = ssp
72 */
#define	__EXCEPTION_ENTRY						;\
	/* Check kernel/user mode. */					;\
	mov	#0x40,	r3						;\
	stc	ssr,	r2	/* r2 = SSR, kept live until tf_ssr */	;\
	swap.b	r3,	r3	/* 0x40 -> 0x4000 */			;\
	mov	r14,	r1						;\
	swap.w	r3,	r3	/* r3 = PSL_MD (0x40000000) */		;\
	mov	r6,	r14	/* trapframe pointer */			;\
	tst	r3,	r2	/* if (SSR.MD == 0) T = 1 */		;\
	mov.l	r1,	@-r14	/* save tf_r14 */			;\
	bf/s	1f		/* T==0 ...Exception from kernel mode */;\
	 mov	r15,	r0	/* (delay slot) r0 = old stack ptr */	;\
	/* Exception from user mode */					;\
	mov	r7,	r15	/* change to kernel stack */		;\
1:									;\
	/* Save remaining registers (trapframe filled top-down) */	;\
	mov.l	r0,	@-r14	/* tf_r15 */				;\
	stc.l	r0_bank,@-r14	/* tf_r0  */				;\
	stc.l	r1_bank,@-r14	/* tf_r1  */				;\
	stc.l	r2_bank,@-r14	/* tf_r2  */				;\
	stc.l	r3_bank,@-r14	/* tf_r3  */				;\
	stc.l	r4_bank,@-r14	/* tf_r4  */				;\
	stc.l	r5_bank,@-r14	/* tf_r5  */				;\
	stc.l	r6_bank,@-r14	/* tf_r6  */				;\
	stc.l	r7_bank,@-r14	/* tf_r7  */				;\
	mov.l	r8,	@-r14	/* tf_r8  */				;\
	mov.l	r9,	@-r14	/* tf_r9  */				;\
	mov.l	r10,	@-r14	/* tf_r10 */				;\
	mov.l	r11,	@-r14	/* tf_r11 */				;\
	mov.l	r12,	@-r14	/* tf_r12 */				;\
	mov.l	r13,	@-r14	/* tf_r13 */				;\
	sts.l	pr,	@-r14	/* tf_pr  */				;\
	sts.l	mach,	@-r14	/* tf_mach*/				;\
	sts.l	macl,	@-r14	/* tf_macl*/				;\
	stc.l	gbr,	@-r14	/* tf_gbr */				;\
	mov.l	r2,	@-r14	/* tf_ssr */				;\
	stc.l	spc,	@-r14	/* tf_spc */				;\
	add	#-8,	r14	/* skip tf_ubc, tf_expevt */		;\
	mov	r14,	r6	/* store frame pointer */		;\
	/* Change register bank to 0 */					;\
	shlr	r3		/* r3 = PSL_RB (0x20000000) */		;\
	stc	sr,	r1	/* r1 = SR */				;\
	not	r3,	r3						;\
	and	r1,	r3						;\
	ldc	r3,	sr	/* SR.RB = 0 */
118
119
120/*
121 * __EXCEPTION_RETURN:
122 *	+ block exceptions
123 *	+ restore all registers from stack.
124 *	+ rte.
125 */
#define	__EXCEPTION_RETURN						;\
	mov	#0x10,	r0						;\
	swap.b	r0,	r0	/* 0x10 -> 0x1000 */			;\
	swap.w	r0,	r0	/* r0 = 0x10000000 (PSL_BL) */		;\
	stc	sr,	r1						;\
	or	r0,	r1						;\
	ldc	r1,	sr	/* SR.BL = 1 */				;\
	stc	r6_bank,r0						;\
	mov	r0,	r14						;\
	add	#TF_SIZE, r0						;\
	ldc	r0,	r6_bank	/* roll up frame pointer */		;\
	add	#8,	r14	/* skip tf_expevt, tf_ubc */		;\
	ldc.l	@r14+,	spc	/* tf_spc */				;\
	ldc.l	@r14+,	ssr	/* tf_ssr */				;\
	ldc.l	@r14+,	gbr	/* tf_gbr */				;\
	lds.l	@r14+,	macl	/* tf_macl*/				;\
	lds.l	@r14+,	mach	/* tf_mach*/				;\
	lds.l	@r14+,	pr	/* tf_pr  */				;\
	mov.l	@r14+,	r13	/* tf_r13 */				;\
	mov.l	@r14+,	r12	/* tf_r12 */				;\
	mov.l	@r14+,	r11	/* tf_r11 */				;\
	mov.l	@r14+,	r10	/* tf_r10 */				;\
	mov.l	@r14+,	r9	/* tf_r9  */				;\
	mov.l	@r14+,	r8	/* tf_r8  */				;\
	mov.l	@r14+,	r7	/* tf_r7  */				;\
	mov.l	@r14+,	r6	/* tf_r6  */				;\
	mov.l	@r14+,	r5	/* tf_r5  */				;\
	mov.l	@r14+,	r4	/* tf_r4  */				;\
	mov.l	@r14+,	r3	/* tf_r3  */				;\
	mov.l	@r14+,	r2	/* tf_r2  */				;\
	mov.l	@r14+,	r1	/* tf_r1  */				;\
	mov.l	@r14+,	r0	/* tf_r0  */				;\
	mov.l	@r14+,	r15	/* tf_r15 (fixed: comma was missing) */	;\
	mov.l	@r14+,	r14	/* tf_r14 (restored last) */		;\
	rte								;\
	 nop			/* (delay slot) */
162
163
164/*
165 * Macros to disable and enable exceptions (including interrupts).
166 * This modifies SR.BL
167 */
168
#define	__EXCEPTION_BLOCK(Rn, Rm)					;\
	stc	sr,	Rm	/* Rm = current SR */			;\
	mov	#0x10,	Rn						;\
	swap.b	Rn,	Rn	/* 0x10 -> 0x1000 */			;\
	swap.w	Rn,	Rn	/* Rn = 0x10000000 (PSL_BL) */		;\
	or	Rm,	Rn	/* Rn = SR | PSL_BL */			;\
	ldc	Rn,	sr	/* exceptions now blocked */
176
#define	__EXCEPTION_UNBLOCK(Rn, Rm)					;\
	stc	sr,	Rm	/* Rm = current SR */			;\
	mov	#0xef,	Rn	/* sign-extends to 0xffffffef */	;\
	swap.b	Rn,	Rn	/* 0xffffefff */			;\
	swap.w	Rn,	Rn	/* Rn = 0xefffffff = ~PSL_BL */		;\
	and	Rn,	Rm	/* clear SR.BL in the copy */		;\
	ldc	Rm,	sr	/* exceptions accepted again */
184
185/*
186 * Macros to disable and enable interrupts.
187 * This modifies SR.I[0-3]
188 */
#define	__INTR_MASK(Rn, Rm)						;\
	stc	sr,	Rm	/* Rm = current SR */			;\
	mov	#0x78,	Rn	/* PSL_IMASK >> 1 */			;\
	shll	Rn		/* Rn = 0x000000f0 (PSL_IMASK) */	;\
	or	Rn,	Rm	/* raise IMASK to maximum */		;\
	ldc	Rm,	sr	/* all interrupts now masked */
195
#define	__INTR_UNMASK(Rn, Rm)						;\
	stc	sr,	Rm	/* Rm = current SR */			;\
	mov	#0x78,	Rn	/* PSL_IMASK >> 1 */			;\
	shll	Rn		/* 0x000000f0 (PSL_IMASK) */		;\
	not	Rn,	Rn	/* Rn = ~PSL_IMASK */			;\
	and	Rn,	Rm	/* IMASK = 0 in the copy */		;\
	ldc	Rm,	sr	/* all interrupt levels accepted */
203
204
205/*
206 * Since __INTR_MASK + __EXCEPTION_UNBLOCK is common sequence, provide
207 * this combo version that does stc/ldc just once.
208 */
#define __INTR_MASK_EXCEPTION_UNBLOCK(Rs, Ri, Rb)			 \
	mov	#0x78, Ri	/* 0xf0 >> 1 */				;\
	mov	#0xef, Rb	/* ~0x10 */				;\
	shll	Ri		/* Ri = PSL_IMASK */			;\
	swap.b	Rb, Rb		/* constant build interleaved ... */	;\
	stc	sr, Rs		/* ... with the SR read-modify */	;\
	swap.w	Rb, Rb		/* Rb = ~PSL_BL */			;\
	or	Ri, Rs		/* SR |= PSL_IMASK */			;\
	and	Rb, Rs		/* SR &= ~PSL_BL */			;\
	ldc	Rs, sr		/* single SR update for both changes */
219
220
221#else /* !_LOCORE */
222
/*
 * Context-switch helpers, implemented per-CPU in assembly.
 * NOTE(review): __sh_switch_resume appears to be the run-time selected
 * sh3/sh4 resume hook used by SH3+SH4 dual kernels (cf. the __sh_
 * indirection in the MOV/FUNC_SYMBOL macros above) -- confirm against
 * the locore implementation.
 */
void sh3_switch_setup(struct lwp *);
void sh4_switch_setup(struct lwp *);
void sh3_switch_resume(struct lwp *);
void sh4_switch_resume(struct lwp *);
extern void (*__sh_switch_resume)(struct lwp *);
228
229#endif /* !_LOCORE */
230