/*
	L2CR functions
	Copyright © 1997-1998 by PowerLogix R & D, Inc.

	This program is free software; you can redistribute it and/or modify
	it under the terms of the GNU General Public License as published by
	the Free Software Foundation; either version 2 of the License, or
	(at your option) any later version.

	This program is distributed in the hope that it will be useful,
	but WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
	GNU General Public License for more details.

	You should have received a copy of the GNU General Public License
	along with this program; if not, write to the Free Software
	Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include <asm/processor.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/cache.h>
#include <asm/page.h>

/* Usage:

	When setting the L2CR register, you must do a few special
	things.  If you are enabling the cache, you must perform a
	global invalidate.  If you are disabling the cache, you must
	flush the cache contents first.  This routine takes care of
	doing these things.  When first enabling the cache, make sure
	you pass in the L2CR you want, as well as passing in the
	global invalidate bit set.  A global invalidate will only be
	performed if the L2I bit is set in applyThis.  When enabling
	the cache, you should also set the L2E bit in applyThis.  If
	you want to modify the L2CR contents after the cache has been
	enabled, the recommended procedure is to first call
	_set_L2CR(0) to disable the cache and then call it again with
	the new values for L2CR.  Examples:

	_set_L2CR(0)		- disables the cache
	_set_L2CR(0xB3A04000)	- enables my G3 upgrade card:
				- L2E set to turn on the cache
				- L2SIZ set to 1MB
				- L2CLK set to 1:1
				- L2RAM set to pipelined synchronous late-write
				- L2I set to perform a global invalidation
				- L2OH set to 0.5 ns
				- L2DF set because this upgrade card
				  requires it

	A similar call should work for your card.  You need to know
	the correct settings for your card and then place them in the
	fields I have outlined above.  Other fields support optional
	features, such as L2DO, which caches only data, or L2TS, which
	causes cache pushes from the L1 cache to go to the L2 cache
	instead of to main memory.

IMPORTANT:
	Starting with the 7450, the bits in this register have moved
	or behave differently.  The Enable, Parity Enable, Size,
	and L2 Invalidate are the only bits that have not moved.
	The size is read-only for these processors with internal L2
	cache, and the invalidate is a control as well as a status bit.
		-- Dan

*/
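/*
 * Illustrative sketch (not part of the original PowerLogix text) of how
 * the entry points below might be driven from C; the exact prototypes are
 * an assumption.  What the code below does guarantee is that the new
 * value is taken from r3, that L2E (0x80000000) decides whether the cache
 * ends up enabled, and that -1 is returned on CPUs without an L2CR.
 *
 *	extern unsigned long _get_L2CR(void);
 *	extern unsigned long _set_L2CR(unsigned long val);
 *
 *	unsigned long l2cr = _get_L2CR();	// save current settings
 *	_set_L2CR(0);				// flush and disable the L2
 *	_set_L2CR(l2cr | 0x80200000);		// re-enable, L2E and L2I set
 */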
/*
 * Summary: this procedure ignores the L2I bit in the value passed in,
 * flushes the cache if it was already enabled, always invalidates the
 * cache, then enables the cache if the L2E bit is set in the value
 * passed in.
 *   -- paulus.
 */
_GLOBAL(_set_L2CR)
	/* Make sure this is a 750 or 7400 chip */
BEGIN_FTR_SECTION
	li	r3,-1
	blr
END_FTR_SECTION_IFCLR(CPU_FTR_L2CR)

	mflr	r9

	/* Stop DST streams */
BEGIN_FTR_SECTION
	DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

	/* Turn off interrupts and data relocation. */
	mfmsr	r7		/* Save MSR in r7 */
	rlwinm	r4,r7,0,17,15	/* Turn off EE bit */
	rlwinm	r4,r4,0,28,26	/* Turn off DR bit */
	sync
	mtmsr	r4
	isync
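	/* rlwinm with a zero shift amount is just an AND with the wrapping
	 * mask MB..ME: the first rlwinm above keeps every bit except bit 16
	 * (MSR[EE]) and the second keeps every bit except bit 27 (MSR[DR]),
	 * in big-endian IBM bit numbering.
	 */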

	mfspr	r8,SPRN_HID0		/* Save HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] */
	sync
	mtspr	SPRN_HID0,r4		/* Disable DPM */
	sync

	/* Get the current enable bit of the L2CR into r4 */
	mfspr	r4,SPRN_L2CR

	/* Tweak some bits */
	rlwinm	r5,r3,0,0,0		/* r5 contains the new enable bit */
	rlwinm	r3,r3,0,11,9		/* Turn off the invalidate bit */
	rlwinm	r3,r3,0,1,31		/* Turn off the enable bit */
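	/* Bit positions assumed by the masks above (750/7400-style L2CR):
	 * L2E is the MSB (0x80000000, IBM bit 0) and L2I is 0x00200000
	 * (IBM bit 10), matching the oris immediates used further down.
	 */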

	/* Check to see if we need to flush */
	rlwinm.	r4,r4,0,0,0
	beq	2f

	/* Flush the cache. First, read the first 4MB of memory (physical) to
	 * put new data in the cache.  (Actually we only need
	 * the size of the L2 cache plus the size of the L1 cache, but 4MB will
	 * cover everything just to be safe).
	 */

	 /**** Might be a good idea to set L2DO here - to prevent instructions
	       from getting into the cache.  But since we invalidate
	       the next time we enable the cache it doesn't really matter.
	       Don't do this unless you accommodate all processor variations.
	       The bit moved on the 7450...
	  ****/

BEGIN_FTR_SECTION
	/* Disable L2 prefetch on some 745x and try to ensure
	 * the L2 prefetch engines are idle.  As explained by the errata
	 * text, we can't be sure they are; we just hope very hard
	 * that it will be enough.  At least I noticed Apple
	 * doesn't even bother doing the dcbf's here...
	 */
	mfspr	r4,SPRN_MSSCR0
	rlwinm	r4,r4,0,0,29	/* Clear the two low-order (prefetch enable) bits */
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lis	r4,KERNELBASE@h
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)

	/* TODO: use HW flush assist when available */

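	/* The displacement loops below touch CTR = 0x00020000 lines of
	 * 32 bytes each, i.e. the 4MB mentioned above, first with loads to
	 * pull new data in and then with dcbf to push dirty lines out.
	 */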
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
1:
	lwzx	r0,r0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b
	isync

	/* Now, flush the first 4MB of memory */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
	sync
1:
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

2:
	/* Set up the L2CR configuration bits (and switch L2 off) */
	/* CPU errata: Make sure the mtspr below is already in the
	 * L1 icache
	 */
	b	20f
	.balign	L1_CACHE_BYTES
22:
	sync
	mtspr	SPRN_L2CR,r3
	sync
	b	23f
20:
	b	21f
21:	sync
	isync
	b	22b
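	/* The trampoline above (b 20f -> 21 -> b 22b) exists so that the
	 * sync/mtspr/sync sequence at the cache-line-aligned label 22 is
	 * already resident in a single L1 icache line before the L2CR
	 * write executes, per the errata note above.
	 */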

23:
	/* Perform a global invalidation */
	oris	r3,r3,0x0020
	sync
	mtspr	SPRN_L2CR,r3
	sync
	isync				/* For errata */

BEGIN_FTR_SECTION
	/* On the 7450, we wait for the L2I bit to clear... */
10:	mfspr	r3,SPRN_L2CR
	andis.	r4,r3,0x0020
	bne	10b
	b	11f
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)

	/* Wait for the invalidation to complete */
3:	mfspr	r3,SPRN_L2CR
	rlwinm.	r4,r3,0,31,31		/* Test L2IP (invalidate in progress) */
	bne	3b

11:	rlwinm	r3,r3,0,11,9		/* Turn off the L2I bit */
	sync
	mtspr	SPRN_L2CR,r3
	sync

	/* See if we need to enable the cache */
	cmplwi	r5,0
	beq	4f

	/* Enable the cache */
	oris	r3,r3,0x8000		/* Set the L2E bit */
	mtspr	SPRN_L2CR,r3
	sync

	/* Enable L2 HW prefetch on 744x/745x */
BEGIN_FTR_SECTION
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
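	/* The ori above sets the two low-order bits of MSSCR0 that the
	 * flush path cleared earlier (rlwinm r4,r4,0,0,29); on 744x/745x
	 * these are understood here to be the L2 prefetch enable bits.
	 */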
4:

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mtspr	SPRN_HID0,r8
	sync

	/* Restore MSR (restores EE and DR bits to original state) */
	SYNC
	mtmsr	r7
	isync

	mtlr	r9
	blr

_GLOBAL(_get_L2CR)
	/* Return the L2CR contents */
	li	r3,0
BEGIN_FTR_SECTION
	mfspr	r3,SPRN_L2CR
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	blr


/*
 * Here is a similar routine for dealing with the L3 cache
 * on the 745x family of chips
 */
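
/*
 * _set_L3CR follows the same convention as _set_L2CR above: the desired
 * L3CR value is passed in r3, the L3E bit decides whether the L3 ends up
 * enabled, and -1 is returned on parts without an L3CR.
 */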

_GLOBAL(_set_L3CR)
	/* Make sure this is a 745x chip */
BEGIN_FTR_SECTION
	li	r3,-1
	blr
END_FTR_SECTION_IFCLR(CPU_FTR_L3CR)

	/* Turn off interrupts and data relocation. */
	mfmsr	r7		/* Save MSR in r7 */
	rlwinm	r4,r7,0,17,15	/* Turn off EE bit */
	rlwinm	r4,r4,0,28,26	/* Turn off DR bit */
	sync
	mtmsr	r4
	isync

	/* Stop DST streams */
	DSSALL
	sync

	/* Get the current enable bit of the L3CR into r4 */
	mfspr	r4,SPRN_L3CR

	/* Tweak some bits */
	rlwinm	r5,r3,0,0,0		/* r5 contains the new enable bit */
	rlwinm	r3,r3,0,22,20		/* Turn off the invalidate bit */
	rlwinm	r3,r3,0,2,31		/* Turn off the enable & PE bits */
	rlwinm	r3,r3,0,5,3		/* Turn off the clken bit */
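	/* Bit positions implied by the masks above (7450-style L3CR):
	 * L3E is IBM bit 0 (0x80000000), L3PE bit 1, L3CLKEN bit 4 and
	 * L3I bit 21 (0x00000400), matching the immediates used below.
	 */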
	/* Check to see if we need to flush */
	rlwinm.	r4,r4,0,0,0
	beq	2f

	/* Flush the cache. */

	/* TODO: use HW flush assist */

	lis	r4,0x0008
	mtctr	r4
	li	r4,0
1:
	lwzx	r0,r0,r4
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

2:
	/* Set up the L3CR configuration bits (and switch L3 off) */
	sync
	mtspr	SPRN_L3CR,r3
	sync

	oris	r3,r3,L3CR_L3RES@h		/* Set reserved bit 5 */
	mtspr	SPRN_L3CR,r3
	sync
	oris	r3,r3,L3CR_L3CLKEN@h		/* Set clken */
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b
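	/* The empty 256-iteration bdnz loop above is just a short settle
	 * delay after touching the L3 clock enable, as are the identical
	 * loops further down.
	 */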

	/* Perform a global invalidation */
	ori	r3,r3,0x0400
	sync
	mtspr	SPRN_L3CR,r3
	sync
	isync

	/* We wait for the L3I bit to clear... */
10:	mfspr	r3,SPRN_L3CR
	andi.	r4,r3,0x0400
	bne	10b

	/* Clear CLKEN */
	rlwinm	r3,r3,0,5,3		/* Turn off the clken bit */
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* See if we need to enable the cache */
	cmplwi	r5,0
	beq	4f

	/* Enable the cache */
	oris	r3,r3,(L3CR_L3E | L3CR_L3CLKEN)@h
	mtspr	SPRN_L3CR,r3
	sync

	/* Wait for stabilize */
	li	r0,256
	mtctr	r0
1:	bdnz	1b

	/* Restore MSR (restores EE and DR bits to original state) */
4:	SYNC
	mtmsr	r7
	isync
	blr

_GLOBAL(_get_L3CR)
	/* Return the L3CR contents */
	li	r3,0
BEGIN_FTR_SECTION
	mfspr	r3,SPRN_L3CR
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	blr

/* --- End of PowerLogix code --- */


/* __flush_disable_L1()	- Flush and disable L1 cache
 *
 * clobbers r0, r3, ctr, cr0
 * Must be called with interrupts disabled and MMU enabled.
 */
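/* The inverse operation, __inval_enable_L1 below, assumes the L1 has
 * already been disabled (typically by this routine) and re-enables it
 * after a flash invalidate.
 */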
_GLOBAL(__flush_disable_L1)
	/* Stop pending AltiVec streams and memory accesses */
BEGIN_FTR_SECTION
	DSSALL
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
	sync

	/* Load the counter with 0x4000 cache lines (512kB) and
	 * fill the cache with data
	 */
	li	r3,0x4000	/* 512kB / 32B */
	mtctr	r3
	lis	r3,KERNELBASE@h
1:
	lwz	r0,0(r3)
	addi	r3,r3,0x0020	/* Go to start of next cache line */
	bdnz	1b
	isync
	sync

	/* Now flush those cache lines */
	li	r3,0x4000	/* 512kB / 32B */
	mtctr	r3
	lis	r3,KERNELBASE@h
1:
	dcbf	0,r3
	addi	r3,r3,0x0020	/* Go to start of next cache line */
	bdnz	1b
	sync

	/* We can now disable the L1 cache (HID0:DCE, HID0:ICE) */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,18,15
	mtspr	SPRN_HID0,r3
	sync
	isync
	blr

/* __inval_enable_L1	- Invalidate and enable L1 cache
 *
 * Assumes L1 is already disabled and MSR:EE is off
 *
 * clobbers r3
 */
_GLOBAL(__inval_enable_L1)
	/* Enable and then flash-invalidate the instruction & data caches */
	mfspr	r3,SPRN_HID0
	ori	r3,r3, HID0_ICE|HID0_ICFI|HID0_DCE|HID0_DCI
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3, HID0_ICFI|HID0_DCI
	mtspr	SPRN_HID0,r3
	sync
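	/* The xori above clears the ICFI and DCI flash-invalidate bits
	 * again once they have done their job, leaving ICE and DCE (and
	 * the other saved HID0 bits) in place.
	 */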

	blr