/*
 * Source: arch/powerpc/platforms/powermac/ cache-management code from
 * the netgear-WNDR4500v2-V1.0.0.60_1.0.38 (linux-2.6) GPL source drop.
 * (Code-browser navigation chrome removed.)
 */
/*
 * This file contains low-level cache management functions
 * used for sleep and CPU speed changes on Apple machines.
 * (In fact the only thing that is Apple-specific is that we assume
 * that we can read from ROM at physical address 0xfff00000.)
 *
 *    Copyright (C) 2004 Paul Mackerras (paulus@samba.org) and
 *                       Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */
16
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/cputable.h>
20
/*
 * Flush and disable all data caches (dL1, L2, L3). This is used
 * when going to sleep, when doing a PMU based cpufreq transition,
 * or when "offlining" a CPU on SMP machines. This code is over
 * paranoid, but I've had enough issues with various CPU revs and
 * bugs that I decided it was worth being over cautious
 */
28
/*
 * flush_disable_caches: entry point; dispatch on CPU type.
 *
 * The branches below are runtime feature sections.  The 7450-family
 * test comes first — presumably because 745x parts also set the L2CR
 * feature, so order matters (TODO confirm against cputable.h).  A CPU
 * with neither feature set just flushes/disables the L1
 * (__flush_disable_L1 is defined elsewhere).
 */
_GLOBAL(flush_disable_caches)
#ifndef CONFIG_6xx
	blr				/* nothing to do on non-6xx builds */
#else
BEGIN_FTR_SECTION
	b	flush_disable_745x	/* 7450 family: HW-assisted flush */
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
BEGIN_FTR_SECTION
	b	flush_disable_75x	/* G3 / 74[01]0: displacement flush */
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	b	__flush_disable_L1	/* no L2 at all: L1 only */
40
/* This is the code for G3 and 74[01]0 */
/*
 * Register use:
 *   r8  = saved HID0 (so HID0[DPM] can be restored at the end)
 *   r10 = saved lr
 *   r11 = saved MSR (restored before returning)
 * Clobbers r0, r3, r4, r5, ctr, cr0.
 */
flush_disable_75x:
	mflr	r10			/* save return address */

	/* Turn off EE and DR in MSR: no interrupts and no data address
	 * translation while we poke the caches with real addresses.
	 */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop DST streams (AltiVec data-stream prefetch) */
BEGIN_FTR_SECTION
	DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

	/* Stop DPM (dynamic power management) while we flush */
	mfspr	r8,SPRN_HID0		/* Save SPRN_HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] (bit 11) */
	sync
	mtspr	SPRN_HID0,r4		/* Disable DPM */
	sync


	/* Displacement-flush the L1 by loading a stream of fresh lines.
	 * ROM base by default */
	lis	r4,0xfff0
	mfpvr	r3
	srwi	r3,r3,16		/* keep the PVR major number */
	cmplwi	cr0,r3,0x7000		/* 0x7000 = 750FX */
	bne+	1f
	/* RAM base on 750FX */
	li	r4,0
	/* NOTE(review): the ROM/RAM base chosen in r4 above appears dead --
	 * r4 is immediately reloaded with the loop count below, so the loads
	 * always start at address 0x4000.  Verify against upstream cache.S.
	 */
1:	li	r4,0x4000
	mtctr	r4
1:	lwz	r0,0(r4)
	addi	r4,r4,32		/* next 32-byte cache line */
	bdnz	1b
	sync
	isync

	/* Disable / invalidate / enable L1 data (and instruction) caches
	 * via HID0[DCE/ICE] plus the flash-invalidate bits DCI/ICFI.
	 */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,~(HID0_DCE | HID0_ICE)
	mtspr	SPRN_HID0,r3
	sync
	isync
	ori	r3,r3,(HID0_DCE|HID0_DCI|HID0_ICE|HID0_ICFI)
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3,(HID0_DCI|HID0_ICFI)	/* drop the invalidate bits */
	mtspr	SPRN_HID0,r3
	sync

	/* Get the current state of the L2CR into r5 (kept for reuse below) */
	mfspr	r5,SPRN_L2CR
	/* Set to data-only (pre-745x bit) */
	oris	r3,r5,L2CR_L2DO@h
	b	2f
	/* When disabling L2, code must be in L1: the branch ping-pong below
	 * (2f -> 3f -> 1b -> mtspr -> 1f) walks these lines once so they are
	 * resident in the icache before the mtspr actually executes.
	 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	/* disp-flush L2. The interesting thing here is that the L2 can be
	 * up to 2Mb ... so using the ROM, we'll end up wrapping back to memory
	 * but that is probably fine. We disp-flush over 4Mb to be safe
	 */
	lis	r4,2			/* 0x20000 lines * 32 bytes = 4Mb */
	mtctr	r4
	lis	r4,0xfff0		/* displacement loads from ROM */
1:	lwz	r0,0(r4)
	addi	r4,r4,32
	bdnz	1b
	sync
	isync
	/* second pass: dcbf the same 4Mb window */
	lis	r4,2
	mtctr	r4
	lis	r4,0xfff0
1:	dcbf	0,r4
	addi	r4,r4,32
	bdnz	1b
	sync
	isync

	/* now disable L2 (clear L2E in the saved L2CR value) */
	rlwinm	r5,r5,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 (same icache trick as above) */
	.balign 32
1:	mtspr	SPRN_L2CR,r5
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	/* Invalidate L2. This is pre-745x, we clear the L2I bit ourselves */
	oris	r4,r5,L2CR_L2I@h
	mtspr	SPRN_L2CR,r4
	sync
	isync

	/* Wait for the invalidation to complete: poll L2CR bit 31
	 * (invalidate-in-progress status on these cores) until clear.
	 */
1:	mfspr	r3,SPRN_L2CR
	rlwinm.	r0,r3,0,31,31
	bne	1b

	/* Clear L2I (no auto-clear on pre-745x) */
	xoris	r4,r4,L2CR_L2I@h
	sync
	mtspr	SPRN_L2CR,r4
	sync

	/* now disable the L1 data cache */
	mfspr	r0,SPRN_HID0
	rlwinm	r0,r0,0,~(HID0_DCE|HID0_ICE)
	mtspr	SPRN_HID0,r0
	sync
	isync

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mfspr	r0,SPRN_HID0
	rlwimi	r0,r8,0,11,11		/* Turn back HID0[DPM] */
	mtspr	SPRN_HID0,r0
	sync

	/* restore DR and EE */
	sync
	mtmsr	r11
	isync

	mtlr	r10
	blr
186
/* This code is for 745x processors */
/*
 * Uses the 745x hardware-assisted flush (L2HWF/L3HWF) instead of the
 * pure displacement flush of older cores, with an extra displacement
 * pass kept as a workaround (see the bug note below).
 * Register use:
 *   r6  = LDSTCR shadow while walking the L1 ways
 *   r11 = saved MSR (restored before returning)
 * Clobbers r0, r3, r4, r5, r6, ctr, cr0.  lr is untouched, so blr
 * returns straight to flush_disable_caches' caller.
 */
flush_disable_745x:
	/* Turn off EE and DR in MSR: no interrupts, no data translation */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop prefetch streams */
	DSSALL
	sync

	/* Disable L2 prefetching: keep MSSCR0 bits 0..29, clearing the
	 * two low-order bits.
	 */
	mfspr	r0,SPRN_MSSCR0
	rlwinm	r0,r0,0,0,29
	mtspr	SPRN_MSSCR0,r0
	sync
	isync
	/* a burst of dcbf on line 0 to settle the prefetch engine */
	lis	r4,0
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4

	/* Due to a bug with the HW flush on some CPU revs, we occasionally
	 * experience data corruption. I'm adding a displacement flush along
	 * with a dcbf loop over a few Mb to "help". The problem isn't totally
	 * fixed by this in theory, but at least, in practice, I couldn't reproduce
	 * it even with a big hammer...
	 */

        lis     r4,0x0002               /* 0x20000 lines * 32 bytes = 4MB */
        mtctr   r4
 	li      r4,0
1:
        lwz     r0,0(r4)
        addi    r4,r4,32                /* Go to start of next cache line */
        bdnz    1b
        isync

        /* Now, flush the first 4MB of memory */
        lis     r4,0x0002
        mtctr   r4
	li      r4,0
        sync
1:
        dcbf    0,r4
        addi    r4,r4,32                /* Go to start of next cache line */
        bdnz    1b

	/* Flush and disable the L1 data cache: walk the 8 ways one at a
	 * time via the way-lock byte in LDSTCR, filling each unlocked way
	 * from a 4K ROM window (128 lines * 32 bytes).
	 */
	mfspr	r6,SPRN_LDSTCR
	lis	r3,0xfff0	/* read from ROM for displacement flush */
	li	r4,0xfe		/* start with only way 0 unlocked */
	li	r5,128		/* 128 lines in each way */
1:	mtctr	r5
	rlwimi	r6,r4,0,24,31	/* insert way mask into LDSTCR low byte */
	mtspr	SPRN_LDSTCR,r6
	sync
	isync
2:	lwz	r0,0(r3)	/* touch each cache line */
	addi	r3,r3,32
	bdnz	2b
	rlwinm	r4,r4,1,24,30	/* move on to the next way */
	ori	r4,r4,1
	cmpwi	r4,0xff		/* all done? */
	bne	1b
	/* now unlock the L1 data cache */
	li	r4,0
	rlwimi	r6,r4,0,24,31
	sync
	mtspr	SPRN_LDSTCR,r6
	sync
	isync

	/* Flush the L2 cache using the hardware assist */
	mfspr	r3,SPRN_L2CR
	cmpwi	r3,0		/* check if it is enabled first: L2E is the
				 * sign bit, so "enabled" reads negative */
	bge	4f
	oris	r0,r3,(L2CR_L2IO_745x|L2CR_L2DO_745x)@h
	b	2f
	/* When disabling/locking L2, code must be in L1: the branch
	 * ping-pong below pre-fetches these lines into the icache before
	 * the mtspr actually executes.
	 */
	.balign 32
1:	mtspr	SPRN_L2CR,r0	/* lock the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	ori	r0,r3,L2CR_L2HWF_745x
	sync
	mtspr	SPRN_L2CR,r0	/* set the hardware flush bit */
3:	mfspr	r0,SPRN_L2CR	/* wait for it to go to 0 */
	andi.	r0,r0,L2CR_L2HWF_745x
	bne	3b
	sync
	rlwinm	r3,r3,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 (same trick as above) */
	.balign 32
1:	mtspr	SPRN_L2CR,r3	/* disable the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	oris	r4,r3,L2CR_L2I@h	/* global invalidate */
	mtspr	SPRN_L2CR,r4
	sync
	isync
1:	mfspr	r4,SPRN_L2CR	/* wait for L2I to clear again */
	andis.	r0,r4,L2CR_L2I@h
	bne	1b
	sync

BEGIN_FTR_SECTION
	/* Flush the L3 cache using the hardware assist */
4:	mfspr	r3,SPRN_L3CR
	cmpwi	r3,0		/* check if it is enabled (L3E = sign bit) */
	bge	6f
	oris	r0,r3,L3CR_L3IO@h
	ori	r0,r0,L3CR_L3DO
	sync
	mtspr	SPRN_L3CR,r0	/* lock the L3 cache */
	sync
	isync
	ori	r0,r0,L3CR_L3HWF
	sync
	mtspr	SPRN_L3CR,r0	/* set the hardware flush bit */
5:	mfspr	r0,SPRN_L3CR	/* wait for it to go to zero */
	andi.	r0,r0,L3CR_L3HWF
	bne	5b
	rlwinm	r3,r3,0,~L3CR_L3E
	sync
	mtspr	SPRN_L3CR,r3	/* disable the L3 cache */
	sync
	ori	r4,r3,L3CR_L3I	/* invalidate the L3 */
	mtspr	SPRN_L3CR,r4
1:	mfspr	r4,SPRN_L3CR	/* wait for L3I to clear */
	andi.	r0,r4,L3CR_L3I
	bne	1b
	sync
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)

	/* NOTE(review): label 4 lives inside the L3 feature section above;
	 * presumably that section is nop-patched (not removed) when
	 * CPU_FTR_L3CR is unset, so "bge 4f" lands on nops and falls
	 * through to 6 -- verify against the feature-fixup code.
	 */
6:	mfspr	r0,SPRN_HID0	/* now disable the L1 data cache */
	rlwinm	r0,r0,0,~HID0_DCE
	mtspr	SPRN_HID0,r0
	sync
	isync
	mtmsr	r11		/* restore DR and EE */
	isync
	blr
#endif	/* CONFIG_6xx */
354