1#include <linux/config.h>
2
3#include "entry.h"
4
5/*
6 * A couple of convenience macros that make writing and reading
7 * SAVE_MIN and SAVE_REST easier.
8 */
#define rARPR		r31	/* saved predicate registers (pr) */
#define rCRIFS		r30	/* cr.ifs (or r0 for the non-cover entry path) */
#define rCRIPSR		r29	/* cr.ipsr at interruption */
#define rCRIIP		r28	/* cr.iip at interruption */
#define rARRSC		r27	/* ar.rsc */
#define rARPFS		r26	/* ar.pfs */
#define rARUNAT		r25	/* ar.unat */
#define rARRNAT		r24	/* ar.rnat */
#define rARBSPSTORE	r23	/* ar.bspstore */
#define rKRBS		r22	/* base of the kernel register backing store */
#define rB6		r21	/* b6 */
#define rR1		r20	/* original r1 */
21
/*
 * Here start the source-dependent macros.
 */
25
/*
 * For ivt.s we want to access the stack virtually so that we don't have to
 * disable translation on interrupts.
 */
#define MINSTATE_START_SAVE_MIN_VIRT								\
	dep r1=-1,r1,61,3;				/* r1 = current (virtual) */		\
(pUser)	mov ar.rsc=0;		/* set enforced lazy mode, pl 0, little-endian, loadrs=0 */	\
	;;											\
(pUser)	addl rKRBS=IA64_RBS_OFFSET,r1;			/* compute base of RBS */		\
(pUser)	mov rARRNAT=ar.rnat;				/* save RSE NaT collection */		\
(pKern) mov r1=sp;					/* get sp  */				\
	;;											\
(pUser)	addl r1=IA64_STK_OFFSET-IA64_PT_REGS_SIZE,r1;	/* compute base of memory stack */	\
(pUser)	mov rARBSPSTORE=ar.bspstore;			/* save ar.bspstore */			\
	;;											\
(pKern) addl r1=-IA64_PT_REGS_SIZE,r1;			/* if in kernel mode, use sp (r12) */	\
(pUser)	mov ar.bspstore=rKRBS;				/* switch to kernel RBS */		\
	;;											\
(pUser)	mov r18=ar.bsp;			/* r18 = kernel RBS top (used for ndirty calc) */	\
(pUser)	mov ar.rsc=0x3;		/* set eager mode, pl 0, little-endian, loadrs=0 */		\
46
/*
 * Finish SAVE_MIN for the virtually-mapped case: turn `current' (r13) into a
 * kernel virtual address (r14 holds the mask, set up in DO_SAVE_MIN) and
 * switch back to register bank 1.
 */
#define MINSTATE_END_SAVE_MIN_VIRT								\
	or r13=r13,r14;		/* make `current' a kernel virtual address */			\
	bsw.1;			/* switch back to bank 1 (must be last in insn group) */	\
	;;
51
/*
 * For mca_asm.S we want to access the stack physically since the state is saved before we
 * go virtual and we don't want to destroy the iip or ipsr.
 */
#define MINSTATE_START_SAVE_MIN_PHYS								\
(pKern) movl sp=ia64_init_stack+IA64_STK_OFFSET-IA64_PT_REGS_SIZE; /* kernel mode: point sp into ia64_init_stack */ \
(pUser)	mov ar.rsc=0;		/* set enforced lazy mode, pl 0, little-endian, loadrs=0 */	\
(pUser)	addl rKRBS=IA64_RBS_OFFSET,r1;		/* compute base of register backing store */	\
	;;											\
(pUser)	mov rARRNAT=ar.rnat;				/* save RSE NaT collection */		\
(pKern) dep r1=0,sp,61,3;				/* compute physical addr of sp	*/	\
(pUser)	addl r1=IA64_STK_OFFSET-IA64_PT_REGS_SIZE,r1;	/* compute base of memory stack */	\
(pUser)	mov rARBSPSTORE=ar.bspstore;			/* save ar.bspstore */			\
(pUser)	dep rKRBS=-1,rKRBS,61,3;			/* compute kernel virtual addr of RBS */\
	;;											\
(pKern) addl r1=-IA64_PT_REGS_SIZE,r1;		/* if in kernel mode, use sp (r12) */		\
(pUser)	mov ar.bspstore=rKRBS;			/* switch to kernel RBS */			\
	;;											\
(pUser)	mov r18=ar.bsp;			/* r18 = kernel RBS top (used for ndirty calc) */	\
(pUser)	mov ar.rsc=0x3;		/* set eager mode, pl 0, little-endian, loadrs=0 */		\
72
/*
 * Finish SAVE_MIN for the physically-mapped case: turn both sp (r12) and
 * `current' (r13) into kernel virtual addresses (r14 holds the mask, set up
 * in DO_SAVE_MIN).
 */
#define MINSTATE_END_SAVE_MIN_PHYS								\
	or r12=r12,r14;		/* make sp a kernel virtual address */				\
	or r13=r13,r14;		/* make `current' a kernel virtual address */			\
	;;
77
/*
 * Select the SAVE_MIN flavor: the including file defines MINSTATE_VIRT
 * (virtual stack access, e.g. ivt.s) or MINSTATE_PHYS (physical stack
 * access, e.g. mca_asm.S) before including this header.
 */
#ifdef MINSTATE_VIRT
# define MINSTATE_START_SAVE_MIN	MINSTATE_START_SAVE_MIN_VIRT
# define MINSTATE_END_SAVE_MIN		MINSTATE_END_SAVE_MIN_VIRT
#endif

#ifdef MINSTATE_PHYS
# define MINSTATE_START_SAVE_MIN	MINSTATE_START_SAVE_MIN_PHYS
# define MINSTATE_END_SAVE_MIN		MINSTATE_END_SAVE_MIN_PHYS
#endif
87
88/*
89 * DO_SAVE_MIN switches to the kernel stacks (if necessary) and saves
90 * the minimum state necessary that allows us to turn psr.ic back
91 * on.
92 *
93 * Assumed state upon entry:
94 *	psr.ic: off
95 *	r31:	contains saved predicates (pr)
96 *
97 * Upon exit, the state is as follows:
98 *	psr.ic: off
99 *	r2 = points to &pt_regs.r16
100 *	r12 = kernel sp (kernel virtual address)
101 *	r13 = points to current task_struct (kernel virtual address)
102 *	p15 = TRUE if psr.i is set in cr.ipsr
103 *	predicate registers (other than p2, p3, and p15), b6, r3, r8, r9, r10, r11, r14, r15:
104 *		preserved
105 *
106 * Note that psr.ic is NOT turned on by this macro.  This is so that
107 * we can pass interruption state as arguments to a handler.
108 */
#define DO_SAVE_MIN(COVER,SAVE_IFS,EXTRA)							  \
	mov rARRSC=ar.rsc;			/* save ar.rsc */				  \
	mov rARPFS=ar.pfs;			/* save ar.pfs */				  \
	mov rR1=r1;				/* save original r1 */				  \
	mov rARUNAT=ar.unat;			/* save ar.unat */				  \
	mov rCRIPSR=cr.ipsr;			/* save cr.ipsr */				  \
	mov rB6=b6;				/* rB6 = branch reg 6 */			  \
	mov rCRIIP=cr.iip;			/* save cr.iip */				  \
	mov r1=IA64_KR(CURRENT);		/* r1 = current (physical) */			  \
	COVER;			/* `cover' for the WITH_COVER variants, empty for SAVE_MIN */	  \
	;;											  \
	invala;					/* invalidate ALAT */				  \
	extr.u r16=rCRIPSR,32,2;		/* extract psr.cpl */				  \
	;;											  \
	cmp.eq pKern,pUser=r0,r16;		/* are we in kernel mode already? (psr.cpl==0) */ \
	/* switch from user to kernel RBS: */							  \
	;;											  \
	SAVE_IFS;			/* capture cr.ifs into rCRIFS (or clear it) */		  \
	MINSTATE_START_SAVE_MIN									  \
	;;											  \
	mov r16=r1;					/* initialize first base pointer */	  \
	adds r17=8,r1;					/* initialize second base pointer */	  \
	;;											  \
	st8 [r16]=rCRIPSR,16;	/* save cr.ipsr */						  \
	st8 [r17]=rCRIIP,16;	/* save cr.iip */						  \
(pKern)	mov r18=r0;		/* make sure r18 isn't NaT */					  \
	;;											  \
	st8 [r16]=rCRIFS,16;	/* save cr.ifs */						  \
	st8 [r17]=rARUNAT,16;	/* save ar.unat */						  \
(pUser)	sub r18=r18,rKRBS;	/* r18=RSE.ndirty*8 */						  \
	;;											  \
	st8 [r16]=rARPFS,16;	/* save ar.pfs */						  \
	st8 [r17]=rARRSC,16;	/* save ar.rsc */						  \
	tbit.nz p15,p0=rCRIPSR,IA64_PSR_I_BIT	/* p15 = psr.i from cr.ipsr */			  \
	;;			/* avoid RAW on r16 & r17 */					  \
(pKern)	adds r16=16,r16;	/* skip over ar_rnat field */					  \
(pKern)	adds r17=16,r17;	/* skip over ar_bspstore field */				  \
(pUser)	st8 [r16]=rARRNAT,16;	/* save ar.rnat */						  \
(pUser)	st8 [r17]=rARBSPSTORE,16;	/* save ar.bspstore */					  \
	;;											  \
	st8 [r16]=rARPR,16;	/* save predicates */						  \
	st8 [r17]=rB6,16;	/* save b6 */							  \
	shl r18=r18,16;		/* compute ar.rsc to be used for "loadrs" */			  \
	;;											  \
	st8 [r16]=r18,16;	/* save ar.rsc value for "loadrs" */				  \
	st8.spill [r17]=rR1,16;	/* save original r1 */						  \
	;;											  \
.mem.offset 0,0;	st8.spill [r16]=r2,16;							  \
.mem.offset 8,0;	st8.spill [r17]=r3,16;							  \
	adds r2=IA64_PT_REGS_R16_OFFSET,r1;	/* r2 = &pt_regs.r16 (see exit state above) */	  \
	;;											  \
.mem.offset 0,0;		st8.spill [r16]=r12,16;						  \
.mem.offset 8,0;		st8.spill [r17]=r13,16;						  \
	cmp.eq pNonSys,pSys=r0,r0	/* initialize pSys=0, pNonSys=1 */			  \
	;;											  \
.mem.offset 0,0;		st8.spill [r16]=r14,16;						  \
.mem.offset 8,0;		st8.spill [r17]=r15,16;						  \
	dep r14=-1,r0,61,3;	/* r14 = mask used below to make addrs kernel-virtual */	  \
	;;											  \
.mem.offset 0,0;		st8.spill [r16]=r8,16;						  \
.mem.offset 8,0;		st8.spill [r17]=r9,16;						  \
	adds r12=-16,r1;	/* switch to kernel memory stack (with 16 bytes of scratch) */	  \
	;;											  \
.mem.offset 0,0;		st8.spill [r16]=r10,16;						  \
.mem.offset 8,0;		st8.spill [r17]=r11,16;						  \
	mov r13=IA64_KR(CURRENT);	/* establish `current' */				  \
	;;											  \
	EXTRA;			/* caller-supplied extra work (e.g. mov r15=r19), may be empty */ \
	movl r1=__gp;		/* establish kernel global pointer */				  \
	;;											  \
	MINSTATE_END_SAVE_MIN
180
181/*
182 * SAVE_REST saves the remainder of pt_regs (with psr.ic on).  This
183 * macro guarantees to preserve all predicate registers, r8, r9, r10,
184 * r11, r14, and r15.
185 *
186 * Assumed state upon entry:
187 *	psr.ic: on
188 *	r2:	points to &pt_regs.r16
189 *	r3:	points to &pt_regs.r17
190 */
#define SAVE_REST				\
.mem.offset 0,0;	st8.spill [r2]=r16,16;	\
.mem.offset 8,0;	st8.spill [r3]=r17,16;	\
	;;					\
.mem.offset 0,0;	st8.spill [r2]=r18,16;	\
.mem.offset 8,0;	st8.spill [r3]=r19,16;	\
	;;					\
	mov r16=ar.ccv;		/* M-unit */	\
	movl r18=FPSR_DEFAULT	/* L-unit */	\
	;;					\
	mov r17=ar.fpsr;	/* M-unit */	\
	mov ar.fpsr=r18;	/* M-unit */	\
	;;					\
.mem.offset 0,0;	st8.spill [r2]=r20,16;	\
.mem.offset 8,0;	st8.spill [r3]=r21,16;	\
	mov r18=b0;	/* r18 = b0, stored below */	\
	;;					\
.mem.offset 0,0;	st8.spill [r2]=r22,16;	\
.mem.offset 8,0;	st8.spill [r3]=r23,16;	\
	mov r19=b7;	/* r19 = b7, stored below */	\
	;;					\
.mem.offset 0,0;	st8.spill [r2]=r24,16;	\
.mem.offset 8,0;	st8.spill [r3]=r25,16;	\
	;;					\
.mem.offset 0,0;	st8.spill [r2]=r26,16;	\
.mem.offset 8,0;	st8.spill [r3]=r27,16;	\
	;;					\
.mem.offset 0,0;	st8.spill [r2]=r28,16;	\
.mem.offset 8,0;	st8.spill [r3]=r29,16;	\
	;;					\
.mem.offset 0,0;	st8.spill [r2]=r30,16;	\
.mem.offset 8,0;	st8.spill [r3]=r31,16;	\
	;;					\
	st8 [r2]=r16,16;	/* ar.ccv */	\
	st8 [r3]=r17,16;	/* ar.fpsr */	\
	;;					\
	st8 [r2]=r18,16;	/* b0 */	\
	st8 [r3]=r19,16+8;	/* b7; extra 8 presumably skips a pt_regs hole to 16-align r3 for stf.spill -- verify against pt_regs layout */	\
	;;					\
	stf.spill [r2]=f6,32;			\
	stf.spill [r3]=f7,32;			\
	;;					\
	stf.spill [r2]=f8,32;			\
	stf.spill [r3]=f9,32
235
/*
 * SAVE_MIN_WITH_COVER:     cover the current frame and record cr.ifs in rCRIFS.
 * SAVE_MIN_WITH_COVER_R19: same, and additionally copy r19 into r15 (EXTRA arg).
 * SAVE_MIN:                no cover; rCRIFS is cleared (r0) instead of loaded
 *                          from cr.ifs.
 */
#define SAVE_MIN_WITH_COVER	DO_SAVE_MIN(cover, mov rCRIFS=cr.ifs,)
#define SAVE_MIN_WITH_COVER_R19	DO_SAVE_MIN(cover, mov rCRIFS=cr.ifs, mov r15=r19)
#define SAVE_MIN		DO_SAVE_MIN(     , mov rCRIFS=r0, )
239