/*-
 * Copyright (C) 2007-2008 Semihalf, Rafal Jaworowski <raj@semihalf.com>
 * Copyright (C) 2006 Semihalf, Marian Balakowicz <m8@semihalf.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN
 * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/powerpc/booke/locore.S 187149 2009-01-13 15:41:58Z raj $
 */

#include "assym.s"

#include <machine/asm.h>
#include <machine/param.h>
#include <machine/spr.h>
#include <machine/psl.h>
#include <machine/pte.h>
#include <machine/trap.h>
#include <machine/vmparam.h>
#include <machine/tlb.h>
#include <machine/bootinfo.h>

#define TMPSTACKSZ	16384

	.text
	.globl	btext
btext:

/*
 * This symbol is here for the benefit of kvm_mkdb, and is supposed to
 * mark the start of kernel text.
 */
	.globl	kernel_text
kernel_text:

/*
 * Startup entry.  Note, this must be the first thing in the text segment!
 */
	.text
	.globl	__start
__start:

/*
 * Assumptions on the boot loader:
 *  - system memory starts from physical address 0
 *  - it's mapped by a single TLB1 entry
 *  - TLB1 mapping is 1:1 pa to va
 *  - kernel is loaded at a 16MB boundary
 *  - all PID registers are set to the same value
 *  - CPU is running in AS=0
 *
 * Register contents provided by loader(8):
 *	r1	: stack pointer
 *	r3	: metadata pointer
 *
 * We rearrange the TLB1 layout as follows:
 *  - find the TLB1 entry we started in
 *  - make sure it's protected, invalidate other entries
 *  - create a temp entry in the second AS (make sure it's not TLB1[1])
 *  - switch to the temp mapping
 *  - map 16MB of RAM in TLB1[1]
 *  - use AS=1, set EPN to KERNBASE and RPN to the kernel load address
 *  - switch to the TLB1[1] mapping
 *  - invalidate the temp mapping
 *
 * locore register use:
 *	r1	: stack pointer
 *	r2	: trace pointer (AP only, for early diagnostics)
 *	r3-r27	: scratch registers
 *	r28	: kernload
 *	r29	: temp TLB1 entry
 *	r30	: initial TLB1 entry we started in
 *	r31	: metadata pointer
 */
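/*
 * Worked example (values illustrative, not guaranteed): with the kernel
 * loaded at physical 0x01000000 and KERNBASE at 0xc0000000, the final entry
 * ends up as TLB1[1] = { EPN=0xc0000000, RPN=0x01000000, TSIZE=16M, TS=0,
 * IPROT=1 }, i.e. a single protected 16MB translation covering the kernel.
 */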

/*
 * Keep metadata ptr in r31 for later use.
 */
	mr	%r31, %r3

/*
 * Initial cleanup
 */
	li	%r3, PSL_DE	/* Keep debug exceptions for CodeWarrior. */
	mtmsr	%r3
	isync

	/* Invalidate all entries in TLB0 */
	li	%r3, 0
	bl	tlb_inval_all

/*
 * Locate the TLB1 entry that maps this code
 */
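/*
 * The bl/mflr pair below is the usual position-independent idiom for
 * reading the current instruction pointer: bl to the very next instruction
 * deposits that instruction's address in LR, and mflr copies it into r3
 * for the TLB search.
 */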
	bl	1f
1:	mflr	%r3
	bl	tlb1_find_current	/* the entry number found is returned in r30 */

	bl	tlb1_inval_all_but_current
/*
 * Create temporary mapping in AS=1 and switch to it
 */
	bl	tlb1_temp_mapping_as1

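/*
 * Switch to AS=1 via rfi: SRR1 gets an MSR image with IS|DS set, and SRR0
 * is pointed 20 bytes past label 2 (five instructions: mflr, addi, two
 * mtspr, rfi), i.e. at the first instruction following the rfi below.
 */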
	mfmsr	%r3
	ori	%r3, %r3, (PSL_IS | PSL_DS)
	bl	2f
2:	mflr	%r4
	addi	%r4, %r4, 20
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi				/* Switch context */

/*
 * Invalidate initial entry
 */
	mr	%r3, %r30
	bl	tlb1_inval_entry

/*
 * Setup final mapping in TLB1[1] and switch to it
 */
	/* Final kernel mapping, map in 16 MB of RAM */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	li	%r4, 1			/* Entry 1 */
	rlwimi	%r3, %r4, 16, 12, 15	/* Select entry 1 in MAS0[ESEL] */
	mtspr	SPR_MAS0, %r3
	isync

	li	%r3, (TLB_SIZE_16M << MAS1_TSIZE_SHIFT)@l
	oris	%r3, %r3, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r3		/* note MAS1[TS] was not filled, so it's TS=0 */
	isync

	lis	%r3, KERNBASE@h
	ori	%r3, %r3, KERNBASE@l	/* EPN = KERNBASE */
	mtspr	SPR_MAS2, %r3
	isync

	/* Discover phys load address */
	bl	3f
3:	mflr	%r4			/* Use current address */
	rlwinm	%r4, %r4, 0, 0, 7	/* 16MB alignment mask */
	mr	%r28, %r4		/* Keep kernel load address */
	ori	%r4, %r4, (MAS3_SX | MAS3_SW | MAS3_SR)@l
	mtspr	SPR_MAS3, %r4		/* Set RPN and protection */
	isync
	tlbwe
	isync
	msync

	/* Switch to the above TLB1[1] mapping */
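	/*
	 * The rfi target is computed by hand: r4 keeps the low 24 bits (the
	 * offset of label 4 within the 16MB region), r3 supplies the upper
	 * bits of KERNBASE, and the extra 36 bytes step over the nine
	 * instructions from label 4 up to and including the rfi.
	 */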
	bl	4f
4:	mflr	%r4
	rlwinm	%r4, %r4, 0, 8, 31	/* Current offset from kernel load address */
	rlwinm	%r3, %r3, 0, 0, 19	/* Clear the page offset from KERNBASE */
	add	%r4, %r4, %r3		/* Convert to kernel virtual address */
	addi	%r4, %r4, 36
	li	%r3, PSL_DE		/* Note AS=0 */
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi

/*
 * Invalidate temp mapping
 */
	mr	%r3, %r29
	bl	tlb1_inval_entry

/*
 * Save kernel load address for later use.
 */
	lis	%r3, kernload@ha
	addi	%r3, %r3, kernload@l
	stw	%r28, 0(%r3)

/*
 * Setup a temporary stack
 */
	lis	%r1, tmpstack@ha
	addi	%r1, %r1, tmpstack@l
	addi	%r1, %r1, (TMPSTACKSZ - 8)

/*
 * Initialise exception vector offsets
 */
	bl	ivor_setup

/*
 * Set up arguments and jump to system initialization code
 */
	lis	%r3, kernel_text@ha
	addi	%r3, %r3, kernel_text@l
	lis	%r4, _end@ha
	addi	%r4, %r4, _end@l
	mr	%r5, %r31		/* metadata ptr */

	/* Prepare e500 core */
	bl	e500_init

	/* Switch to thread0.td_kstack now */
	mr	%r1, %r3
	li	%r3, 0
	stw	%r3, 0(%r1)

	/* Machine independent part, does not return */
	bl	mi_startup
	/* NOT REACHED */
5:	b	5b

/*
 * Invalidate all entries in the given TLB.
 *
 * r3	TLBSEL
 */
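/*
 * The effective address handed to tlbivax is really a command word: the
 * rlwinm below places TLBSEL at bit 0x8 (mask 0x18) and the ori sets the
 * INV_ALL bit (0x4), so a single tlbivax flash-invalidates the whole
 * selected array (bit positions as used by this code, per the e500 EA
 * encoding).
 */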
tlb_inval_all:
	rlwinm	%r3, %r3, 3, 0x18	/* TLBSEL */
	ori	%r3, %r3, 0x4		/* INVALL */
	tlbivax	0, %r3
	isync
	msync

	tlbsync
	msync
	blr

/*
 * Expects the address to look up in r3; returns the entry number in r30.
 *
 * FIXME: the hidden assumption is we are now running in AS=0, but we should
 * retrieve the actual AS from MSR[IS|DS] and put it in MAS6[SAS].
 */
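/*
 * tlbsx searches using SAS/SPID from MAS6 (loaded from PID0 below) and, on
 * a hit, loads MAS0-MAS3 from the matching entry, so MAS0[ESEL] afterwards
 * identifies the TLB1 slot that maps this code.
 */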
tlb1_find_current:
	mfspr	%r17, SPR_PID0
	slwi	%r17, %r17, MAS6_SPID0_SHIFT
	mtspr	SPR_MAS6, %r17
	isync
	tlbsx	0, %r3
	mfspr	%r17, SPR_MAS0
	rlwinm	%r30, %r17, 16, 20, 31		/* MAS0[ESEL] -> r30 */

	/* Make sure we have IPROT set on the entry */
	mfspr	%r17, SPR_MAS1
	oris	%r17, %r17, MAS1_IPROT@h
	mtspr	SPR_MAS1, %r17
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Invalidates a single entry in TLB1.
 *
 * r3		ESEL
 * r4-r5	scratched
 */
tlb1_inval_entry:
	lis	%r4, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r4, %r3, 16, 12, 15	/* Select our entry */
	mtspr	SPR_MAS0, %r4
	isync
	tlbre
	li	%r5, 0			/* MAS1[V] = 0 */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * r30		current entry number
 * r29		returned temp entry
 * r3-r5	scratched
 */
tlb1_temp_mapping_as1:
	/* Read our current translation */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r3, %r30, 16, 12, 15	/* Select our current entry */
	mtspr	SPR_MAS0, %r3
	isync
	tlbre

	/*
	 * Prepare and write temp entry
	 *
	 * FIXME this is not robust against overflow, i.e. when the current
	 * entry is the last in TLB1
	 */
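	/*
	 * Entry 1 is skipped deliberately: the final kernel mapping is
	 * created in TLB1[1] while this temporary entry is still live, so
	 * the temp slot must not collide with it.
	 */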
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	addi	%r29, %r30, 1		/* Use next entry. */
	li	%r4, 1
	cmpw	%r4, %r29
	bne	1f
	addi	%r29, %r29, 1
1:	rlwimi	%r3, %r29, 16, 12, 15	/* Select temp entry */
	mtspr	SPR_MAS0, %r3
	isync
	mfspr	%r5, SPR_MAS1
	li	%r4, 1			/* AS=1 */
	rlwimi	%r5, %r4, 12, 19, 19	/* Set MAS1[TS] */
	li	%r4, 0			/* Global mapping, TID=0 */
	rlwimi	%r5, %r4, 16, 8, 15
	oris	%r5, %r5, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Loops over TLB1, invalidates all entries skipping the one which currently
 * maps this code.
 *
 * r30		current entry
 * r3-r6	scratched
 */
tlb1_inval_all_but_current:
	mr	%r6, %r3
	mfspr	%r3, SPR_TLB1CFG	/* Get number of entries */
	andi.	%r3, %r3, TLBCFG_NENTRY_MASK@l
	li	%r4, 0			/* Start from Entry 0 */
1:	lis	%r5, MAS0_TLBSEL1@h
	rlwimi	%r5, %r4, 16, 12, 15
	mtspr	SPR_MAS0, %r5
	isync
	tlbre
	mfspr	%r5, SPR_MAS1
	cmpw	%r4, %r30		/* our current entry? */
	beq	2f
	rlwinm	%r5, %r5, 0, 2, 31	/* clear VALID and IPROT bits */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
2:	addi	%r4, %r4, 1
	cmpw	%r4, %r3		/* Check if this is the last entry */
	bne	1b
	blr

/************************************************************************/
/* locore subroutines */
/************************************************************************/

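/*
 * On e500, each vector address is formed by concatenating the upper half
 * of IVPR with the low 16-bit offset held in the corresponding IVOR,
 * which is why @h of the base and @l of each handler label suffice below.
 */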
ivor_setup:
	/* Set base address of interrupt handler routines */
	lis	%r3, interrupt_vector_base@h
	mtspr	SPR_IVPR, %r3

	/* Assign interrupt handler routines offsets */
	li	%r3, int_critical_input@l
	mtspr	SPR_IVOR0, %r3
	li	%r3, int_machine_check@l
	mtspr	SPR_IVOR1, %r3
	li	%r3, int_data_storage@l
	mtspr	SPR_IVOR2, %r3
	li	%r3, int_instr_storage@l
	mtspr	SPR_IVOR3, %r3
	li	%r3, int_external_input@l
	mtspr	SPR_IVOR4, %r3
	li	%r3, int_alignment@l
	mtspr	SPR_IVOR5, %r3
	li	%r3, int_program@l
	mtspr	SPR_IVOR6, %r3
	li	%r3, int_syscall@l
	mtspr	SPR_IVOR8, %r3
	li	%r3, int_decrementer@l
	mtspr	SPR_IVOR10, %r3
	li	%r3, int_fixed_interval_timer@l
	mtspr	SPR_IVOR11, %r3
	li	%r3, int_watchdog@l
	mtspr	SPR_IVOR12, %r3
	li	%r3, int_data_tlb_error@l
	mtspr	SPR_IVOR13, %r3
	li	%r3, int_inst_tlb_error@l
	mtspr	SPR_IVOR14, %r3
	li	%r3, int_debug@l
	mtspr	SPR_IVOR15, %r3
	blr

/*
 * void tid_flush(tlbtid_t tid);
 *
 * Invalidate all TLB0 entries which match the given TID. Note this is
 * dedicated for cases when invalidation(s) should NOT be propagated to other
 * CPUs.
 *
 * Global vars tlb0_ways, tlb0_entries_per_way are assumed to have been set up
 * correctly (by tlb0_get_tlbconf()).
 */
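/*
 * TLB0 is set-associative: MAS0[ESEL] picks the way, while the EPN bits
 * written to MAS2 pick the set, so the two nested loops below visit every
 * (way, set) pair and tlbre/tlbwe each entry in place.
 */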
ENTRY(tid_flush)
	cmpwi	%r3, TID_KERNEL
	beq	tid_flush_end	/* don't evict kernel translations */

	/* Number of TLB0 ways */
	lis	%r4, tlb0_ways@h
	ori	%r4, %r4, tlb0_ways@l
	lwz	%r4, 0(%r4)

	/* Number of entries / way */
	lis	%r5, tlb0_entries_per_way@h
	ori	%r5, %r5, tlb0_entries_per_way@l
	lwz	%r5, 0(%r5)

	/* Disable interrupts */
	mfmsr	%r10
	wrteei	0

	li	%r6, 0		/* ways counter */
loop_ways:
	li	%r7, 0		/* entries [per way] counter */
loop_entries:
	/* Select TLB0 and ESEL (way) */
	lis	%r8, MAS0_TLBSEL0@h
	rlwimi	%r8, %r6, 16, 14, 15
	mtspr	SPR_MAS0, %r8
	isync

	/* Select EPN (entry within the way) */
	rlwinm	%r8, %r7, 12, 13, 19
	mtspr	SPR_MAS2, %r8
	isync
	tlbre

	/* Check if valid entry */
	mfspr	%r8, SPR_MAS1
	andis.	%r9, %r8, MAS1_VALID@h
	beq	next_entry	/* invalid entry */

	/* Check if this is our TID */
	rlwinm	%r9, %r8, 16, 24, 31	/* Extract MAS1[TID] */

	cmplw	%r9, %r3
	bne	next_entry	/* not our TID */

	/* Clear VALID bit */
	rlwinm	%r8, %r8, 0, 1, 31
	mtspr	SPR_MAS1, %r8
	isync
	tlbwe
	isync
	msync

next_entry:
	addi	%r7, %r7, 1
	cmpw	%r7, %r5
	bne	loop_entries

	/* Next way */
	addi	%r6, %r6, 1
	cmpw	%r6, %r4
	bne	loop_ways

	/* Restore MSR (possibly re-enable interrupts) */
	mtmsr	%r10
	isync

tid_flush_end:
	blr

/*
 * Cache disable/enable/inval sequences according
 * to section 2.16 of E500CORE RM.
 */
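/*
 * The invalidate routines poll their own flash-invalidate bit (DCFI/ICFI),
 * which the hardware clears once the invalidation completes; the
 * enable/disable routines only need the sync fencing around the L1CSRn
 * update.
 */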
ENTRY(dcache_inval)
	/* Invalidate d-cache */
	mfspr	%r3, SPR_L1CSR0
	ori	%r3, %r3, (L1CSR0_DCFI | L1CSR0_DCLFR)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR0
	andi.	%r3, %r3, L1CSR0_DCFI
	bne	1b
	blr

ENTRY(dcache_disable)
	/* Disable d-cache */
	mfspr	%r3, SPR_L1CSR0
	li	%r4, L1CSR0_DCE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(dcache_enable)
	/* Enable d-cache */
	mfspr	%r3, SPR_L1CSR0
	oris	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@h
	ori	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(icache_inval)
	/* Invalidate i-cache */
	mfspr	%r3, SPR_L1CSR1
	ori	%r3, %r3, (L1CSR1_ICFI | L1CSR1_ICLFR)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR1
	andi.	%r3, %r3, L1CSR1_ICFI
	bne	1b
	blr

ENTRY(icache_disable)
	/* Disable i-cache */
	mfspr	%r3, SPR_L1CSR1
	li	%r4, L1CSR1_ICE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

ENTRY(icache_enable)
	/* Enable i-cache */
	mfspr	%r3, SPR_L1CSR1
	oris	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@h
	ori	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

/*
 * int setfault()
 *
 * Similar to setjmp; sets up for handling faults on accesses to user memory.
 * Any routine using this may only call bcopy, either the form below or the
 * (currently used) optimized C code, so that it doesn't use any non-volatile
 * registers.
 */
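/*
 * Typical caller pattern (a sketch only; the real callers live in the
 * powerpc copyin/copyout code):
 *
 *	faultbuf env;
 *	if (setfault(env)) {
 *		curthread->td_pcb->pcb_onfault = NULL;
 *		return (EFAULT);
 *	}
 *	... touch user memory ...
 *	curthread->td_pcb->pcb_onfault = NULL;
 *
 * A fault on user memory "longjmps" back here with a non-zero return value.
 */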
	.globl	setfault
setfault:
	mflr	%r0
	mfsprg0	%r4
	lwz	%r4, PC_CURTHREAD(%r4)
	lwz	%r4, TD_PCB(%r4)
	stw	%r3, PCB_ONFAULT(%r4)
	mfcr	%r10
	mfctr	%r11
	mfxer	%r12
	stw	%r0, 0(%r3)
	stw	%r1, 4(%r3)
	stw	%r2, 8(%r3)
	stmw	%r10, 12(%r3)		/* store CR, CTR, XER, [r13 .. r31] */
	li	%r3, 0			/* return FALSE */
	blr

/************************************************************************/
/* Data section								*/
/************************************************************************/
	.data
	.align	4
tmpstack:
	.space	TMPSTACKSZ

/*
 * Compiled KERNBASE location
 */
	.globl	kernbase
	.set	kernbase, KERNBASE

/*
 * Globals
 */
#define	INTRCNT_COUNT	256		/* max(HROWPIC_IRQMAX,OPENPIC_IRQMAX) */

GLOBAL(kernload)
	.long	0
GLOBAL(intrnames)
	.space	INTRCNT_COUNT * (MAXCOMLEN + 1) * 2
GLOBAL(eintrnames)
	.align 4
GLOBAL(intrcnt)
	.space	INTRCNT_COUNT * 4 * 2
GLOBAL(eintrcnt)

#include <powerpc/booke/trap_subr.S>