/*
 * This file contains low level CPU setup functions.
 *    Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/mmu.h>

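/* Per-CPU setup entry points (referenced from the cputable). Each one
 * saves the link register, calls the cache / HID0 / errata helpers that
 * apply to that CPU family, and returns.
 */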
_GLOBAL(__setup_cpu_603)
	mflr	r5
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10		/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
BEGIN_FTR_SECTION
	bl	__init_fpu_registers
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_604)
	mflr	r5
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750cx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750fx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7400)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7410)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_745x)
	mflr	r5
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr

/* Enable caches for 603's, 604, 750 & 7400 */
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches */
	sync
	isync
	blr

/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

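/* MSSSR0 errata workarounds for early 7400/7410 parts. Each entry
 * point checks the revision field of the PVR and returns early on
 * revisions that don't need the fixups; otherwise it falls through
 * to the shared MSSSR0 update below.
 */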
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr

/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31		/* extract the PLL configuration (HID1[0:3]) */
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr				/* return unless PLL config is 7, 9 or 11 */
	lwz	r6,CPU_SPEC_FEATURES(r4)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* remove CPU_FTR_CAN_NAP from the feature mask */
	stw	r6,CPU_SPEC_FEATURES(r4)
	blr

/* 750fx specific
 */
setup_750fx:
	blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache setup by
	 * the firmware. If one is present, we disable NAP capability,
	 * as it's known to be bogus on rev 2.1 and earlier
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andi.	r0,r6,CPU_FTR_L3_DISABLE_NAP
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0

	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr

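/* Initialize the FPRs to a known (zero) state by loading them from
 * empty_zero_page, with MSR:FP temporarily enabled.
 */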
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10
	isync
	blr


/* Definitions for the table used to save CPU states */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text

/* Called in normal context to back up CPU 0 state. This
 * function is also called for machine sleep. It does not
 * cover cache settings or the MMU setup, BATs, etc...
 * but rather the "special" registers like HID0, HID1,
 * MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, so we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr

/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, so we back them all up */
	mfcr	r7

	/* Get storage ptr (physical address, the MMU is off) */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f
	/* Restore 750FX specific registers, that is:
	 * restore HID2 on rev 2.x and, on all revisions, restore the
	 * PLL config, switching through PLL 0 first
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmplwi	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr
