/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (dm@engr.sgi.com)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/pgtable-bits.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

/*
 * Offset from the thread_info base to the saved CP0 Status register in
 * the pt_regs frame at the top of the kernel stack; the first 32 bytes
 * of the stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)

/*
 * FPU context is saved iff the process has used the FPU in the current
 * time slice, as indicated by _TIF_USEDFPU.  In any case, the CU1 bit in
 * the saved user-space STATUS register should be 0, so that a process
 * *always* starts its userland with the FPU disabled after each context
 * switch.
 *
 * The FPU will be enabled again as soon as the process accesses it,
 * through the do_cpu() trap.
 */

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti)
 */
	.align	5
	LEAF(resume)
#ifndef CONFIG_CPU_HAS_LLSC
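	/*
	 * CPUs without hardware ll/sc have them emulated in software;
	 * clearing ll_bit across a context switch makes a pending sc in
	 * the previous task fail, keeping the emulated sequence atomic.
	 */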
	sw	zero, ll_bit
#endif
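	/*
	 * Save the current Status register, the callee-saved registers
	 * and the return address into prev's thread structure.
	 */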
	mfc0	t1, CP0_STATUS
	LONG_S	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	LONG_S	ra, THREAD_REG31(a0)

	/*
	 * check if we need to save FPU registers
	 */
	PTR_L	t3, TASK_THREAD_INFO(a0)
	LONG_L	t0, TI_FLAGS(t3)
	li	t1, _TIF_USEDFPU
	and	t2, t0, t1
	beqz	t2, 1f
	nor	t1, zero, t1

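	/*
	 * t1 now holds ~_TIF_USEDFPU; clear the flag so the FPU context
	 * is not saved again unless the task touches the FPU during its
	 * next time slice.
	 */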
	and	t0, t0, t1
	LONG_S	t0, TI_FLAGS(t3)

	/*
	 * clear saved user stack CU1 bit
	 */
	LONG_L	t0, ST_OFF(t3)
	li	t1, ~ST0_CU1
	and	t0, t0, t1
	LONG_S	t0, ST_OFF(t3)

	fpu_save_double a0 t0 t1		# c0_status passed in t0
						# clobbers t1
1:

	/*
	 * The order in which the registers are restored takes care of
	 * the race in updating $28, $29 and kernelsp without having to
	 * disable interrupts.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1

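	/*
	 * Point kernelsp at the top of the new thread's kernel stack
	 * (thread_info + _THREAD_SIZE - 32) so the exception entry code
	 * picks up the right stack when coming from user mode.
	 */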
#if (_THREAD_SIZE - 32) < 0x10000
	PTR_ADDIU	t0, $28, _THREAD_SIZE - 32
#else
	PTR_LI		t0, _THREAD_SIZE - 32
	PTR_ADDU	t0, $28
#endif
	set_saved_sp	t0, t1, t2
#ifdef CONFIG_MIPS_MT_SMTC
	/* Read-modify-writes of Status must be atomic on a VPE */
	mfc0	t2, CP0_TCSTATUS
	ori	t1, t2, TCSTATUS_IXMT
	mtc0	t1, CP0_TCSTATUS
	andi	t2, t2, TCSTATUS_IXMT
	_ehb
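	/*
	 * Disable the other TCs on this VPE while Status/TCStatus are
	 * being updated; dmt returns the previous VPEControl value in t0
	 * and the emt below re-enables threading only if it was enabled
	 * before.
	 */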
	DMT	8				# dmt	t0
	move	t1,ra
	jal	mips_ihb
	move	ra,t1
#endif /* CONFIG_MIPS_MT_SMTC */
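	/*
	 * Build the new Status value: keep the interrupt mask (IM7..IM0)
	 * and the IE bit of the current Status and take all other bits
	 * from the Status saved in the next thread.
	 */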
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	LONG_L	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
	_ehb
	andi	t0, t0, VPECONTROL_TE
	beqz	t0, 1f
	emt
1:
	mfc0	t1, CP0_TCSTATUS
	xori	t1, t1, TCSTATUS_IXMT
	or	t1, t1, t2
	mtc0	t1, CP0_TCSTATUS
	_ehb
#endif /* CONFIG_MIPS_MT_SMTC */
	move	v0, a0
	jr	ra
	END(resume)

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
#ifdef CONFIG_64BIT
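	/*
	 * On 64-bit kernels fpu_save_double needs the Status value in t0
	 * so it can check the FR bit and decide whether the odd-numbered
	 * FP registers have to be saved as well.
	 */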
	mfc0	t0, CP0_STATUS
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#ifdef CONFIG_64BIT
	mfc0	t0, CP0_STATUS
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)

/*
 * Load the FPU with signaling NaNs.  The bit pattern we use has the
 * property that it represents a signaling NaN no matter whether it is
 * interpreted as single or as double precision.
 *
 * We initialize fcr31 to round to nearest, with no exceptions enabled.
 */

#define FPU_DEFAULT  0x00000000

LEAF(_init_fpu)
#ifdef CONFIG_MIPS_MT_SMTC
	/* Rather than manipulate per-VPE Status, set per-TC bit in TCStatus */
	mfc0	t0, CP0_TCSTATUS
	/* Bit position is the same for Status, TCStatus */
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_TCSTATUS
#else /* Normal MIPS CU1 enable */
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS
#endif /* CONFIG_MIPS_MT_SMTC */
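	/*
	 * Make sure the CU1 enable above has taken effect before the
	 * first FPU instruction is issued.
	 */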
	enable_fpu_hazard

	li	t1, FPU_DEFAULT
	ctc1	t1, fcr31

	li	t1, -1				# SNaN

#ifdef CONFIG_64BIT
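	/*
	 * Shifting left by 5 moves bit 26 (the FR bit of Status) into the
	 * sign bit; if FR is clear the FPU is in 16/32 register mode and
	 * the odd-numbered registers below are skipped.
	 */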
	sll	t0, t0, 5
	bgez	t0, 1f				# 16 / 32 register mode?

	dmtc1	t1, $f1
	dmtc1	t1, $f3
	dmtc1	t1, $f5
	dmtc1	t1, $f7
	dmtc1	t1, $f9
	dmtc1	t1, $f11
	dmtc1	t1, $f13
	dmtc1	t1, $f15
	dmtc1	t1, $f17
	dmtc1	t1, $f19
	dmtc1	t1, $f21
	dmtc1	t1, $f23
	dmtc1	t1, $f25
	dmtc1	t1, $f27
	dmtc1	t1, $f29
	dmtc1	t1, $f31
1:
#endif

#ifdef CONFIG_CPU_MIPS32
	mtc1	t1, $f0
	mtc1	t1, $f1
	mtc1	t1, $f2
	mtc1	t1, $f3
	mtc1	t1, $f4
	mtc1	t1, $f5
	mtc1	t1, $f6
	mtc1	t1, $f7
	mtc1	t1, $f8
	mtc1	t1, $f9
	mtc1	t1, $f10
	mtc1	t1, $f11
	mtc1	t1, $f12
	mtc1	t1, $f13
	mtc1	t1, $f14
	mtc1	t1, $f15
	mtc1	t1, $f16
	mtc1	t1, $f17
	mtc1	t1, $f18
	mtc1	t1, $f19
	mtc1	t1, $f20
	mtc1	t1, $f21
	mtc1	t1, $f22
	mtc1	t1, $f23
	mtc1	t1, $f24
	mtc1	t1, $f25
	mtc1	t1, $f26
	mtc1	t1, $f27
	mtc1	t1, $f28
	mtc1	t1, $f29
	mtc1	t1, $f30
	mtc1	t1, $f31
#else
	.set	mips3
	dmtc1	t1, $f0
	dmtc1	t1, $f2
	dmtc1	t1, $f4
	dmtc1	t1, $f6
	dmtc1	t1, $f8
	dmtc1	t1, $f10
	dmtc1	t1, $f12
	dmtc1	t1, $f14
	dmtc1	t1, $f16
	dmtc1	t1, $f18
	dmtc1	t1, $f20
	dmtc1	t1, $f22
	dmtc1	t1, $f24
	dmtc1	t1, $f26
	dmtc1	t1, $f28
	dmtc1	t1, $f30
#endif
	jr	ra
	END(_init_fpu)
