/* locore.S — FreeBSD head, sys/powerpc/booke/locore.S, r277334 */
/*-
 * Copyright (C) 2007-2009 Semihalf, Rafal Jaworowski <raj@semihalf.com>
 * Copyright (C) 2006 Semihalf, Marian Balakowicz <m8@semihalf.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN
 * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/powerpc/booke/locore.S 277334 2015-01-18 18:32:43Z nwhitehorn $
 */

#include "assym.s"

#include <machine/asm.h>
#include <machine/hid.h>
#include <machine/param.h>
#include <machine/spr.h>
#include <machine/pte.h>
#include <machine/trap.h>
#include <machine/vmparam.h>
#include <machine/tlb.h>

/* Size of the temporary bootstrap stack, in bytes. */
#define TMPSTACKSZ	16384

42184319Smarcel	.text
43184319Smarcel	.globl	btext
44184319Smarcelbtext:
45184319Smarcel
46176771Sraj/*
47176771Sraj * This symbol is here for the benefit of kvm_mkdb, and is supposed to
48176771Sraj * mark the start of kernel text.
49176771Sraj */
50176771Sraj	.globl	kernel_text
51176771Srajkernel_text:
52176771Sraj
53176771Sraj/*
54176771Sraj * Startup entry.  Note, this must be the first thing in the text segment!
55176771Sraj */
56176771Sraj	.text
57176771Sraj	.globl	__start
58176771Sraj__start:
59176771Sraj
60176771Sraj/*
61186229Sraj * Assumptions on the boot loader:
62176771Sraj *  - system memory starts from physical address 0
63176771Sraj *  - it's mapped by a single TBL1 entry
64176771Sraj *  - TLB1 mapping is 1:1 pa to va
65186229Sraj *  - kernel is loaded at 16MB boundary
66176771Sraj *  - all PID registers are set to the same value
67186229Sraj *  - CPU is running in AS=0
68176771Sraj *
69186229Sraj * Registers contents provided by the loader(8):
70176771Sraj *	r1	: stack pointer
71176771Sraj *	r3	: metadata pointer
72176771Sraj *
73176771Sraj * We rearrange the TLB1 layout as follows:
74186229Sraj *  - find TLB1 entry we started in
75176771Sraj *  - make sure it's protected, ivalidate other entries
76186229Sraj *  - create temp entry in the second AS (make sure it's not TLB[1])
77176771Sraj *  - switch to temp mapping
78186229Sraj *  - map 16MB of RAM in TLB1[1]
79176771Sraj *  - use AS=1, set EPN to KERNBASE and RPN to kernel load address
80186229Sraj *  - switch to to TLB1[1] mapping
81176771Sraj *  - invalidate temp mapping
82176771Sraj *
83186229Sraj * locore registers use:
84176771Sraj *	r1	: stack pointer
85186229Sraj *	r2	: trace pointer (AP only, for early diagnostics)
86224611Smarcel *	r3-r27	: scratch registers
87222400Smarcel *	r28	: temp TLB1 entry
88222400Smarcel *	r29	: initial TLB1 entry we started in
89222400Smarcel *	r30-r31	: arguments (metadata pointer)
90176771Sraj */
91176771Sraj
92176771Sraj/*
93222400Smarcel * Keep arguments in r30 & r31 for later use.
94176771Sraj */
95222400Smarcel	mr	%r30, %r3
96222400Smarcel	mr	%r31, %r4
97176771Sraj
98176771Sraj/*
99176771Sraj * Initial cleanup
100176771Sraj */
101186229Sraj	li	%r3, PSL_DE	/* Keep debug exceptions for CodeWarrior. */
102186229Sraj	mtmsr	%r3
103176771Sraj	isync
104176771Sraj
105191375Sraj	lis	%r3, HID0_E500_DEFAULT_SET@h
106191375Sraj	ori	%r3, %r3, HID0_E500_DEFAULT_SET@l
107191375Sraj	mtspr	SPR_HID0, %r3
108191375Sraj	isync
109191375Sraj	lis	%r3, HID1_E500_DEFAULT_SET@h
110191375Sraj	ori	%r3, %r3, HID1_E500_DEFAULT_SET@l
111191375Sraj	mtspr	SPR_HID1, %r3
112191375Sraj	isync
113191375Sraj
114186229Sraj	/* Invalidate all entries in TLB0 */
115186229Sraj	li	%r3, 0
116186229Sraj	bl	tlb_inval_all
117176771Sraj
118224611Smarcel	cmpwi	%r30, 0
119224611Smarcel	beq	done_mapping
120224611Smarcel
121176771Sraj/*
122186229Sraj * Locate the TLB1 entry that maps this code
123176771Sraj */
124186229Sraj	bl	1f
125186229Sraj1:	mflr	%r3
126222400Smarcel	bl	tlb1_find_current	/* the entry found is returned in r29 */
127176771Sraj
128186229Sraj	bl	tlb1_inval_all_but_current
129242526Smarcel
130176771Sraj/*
131186229Sraj * Create temporary mapping in AS=1 and switch to it
132176771Sraj */
133242526Smarcel	addi	%r3, %r29, 1
134186229Sraj	bl	tlb1_temp_mapping_as1
135176771Sraj
136186229Sraj	mfmsr	%r3
137186229Sraj	ori	%r3, %r3, (PSL_IS | PSL_DS)
138186229Sraj	bl	2f
139186229Sraj2:	mflr	%r4
140186229Sraj	addi	%r4, %r4, 20
141186229Sraj	mtspr	SPR_SRR0, %r4
142186229Sraj	mtspr	SPR_SRR1, %r3
143176771Sraj	rfi				/* Switch context */
144176771Sraj
145176771Sraj/*
146176771Sraj * Invalidate initial entry
147176771Sraj */
148222400Smarcel	mr	%r3, %r29
149176771Sraj	bl	tlb1_inval_entry
150176771Sraj
151176771Sraj/*
152176771Sraj * Setup final mapping in TLB1[1] and switch to it
153176771Sraj */
154176771Sraj	/* Final kernel mapping, map in 16 MB of RAM */
155186229Sraj	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
156222391Smarcel	li	%r4, 0			/* Entry 0 */
157186229Sraj	rlwimi	%r3, %r4, 16, 12, 15
158186229Sraj	mtspr	SPR_MAS0, %r3
159176771Sraj	isync
160176771Sraj
161257190Snwhitehorn	li	%r3, (TLB_SIZE_64M << MAS1_TSIZE_SHIFT)@l
162186229Sraj	oris	%r3, %r3, (MAS1_VALID | MAS1_IPROT)@h
163186229Sraj	mtspr	SPR_MAS1, %r3		/* note TS was not filled, so it's TS=0 */
164176771Sraj	isync
165176771Sraj
166186229Sraj	lis	%r3, KERNBASE@h
167186229Sraj	ori	%r3, %r3, KERNBASE@l	/* EPN = KERNBASE */
168192532Sraj#ifdef SMP
169192532Sraj	ori	%r3, %r3, MAS2_M@l	/* WIMGE = 0b00100 */
170192532Sraj#endif
171186229Sraj	mtspr	SPR_MAS2, %r3
172176771Sraj	isync
173176771Sraj
174186229Sraj	/* Discover phys load address */
175186229Sraj	bl	3f
176186229Sraj3:	mflr	%r4			/* Use current address */
177186229Sraj	rlwinm	%r4, %r4, 0, 0, 7	/* 16MB alignment mask */
178186229Sraj	ori	%r4, %r4, (MAS3_SX | MAS3_SW | MAS3_SR)@l
179186229Sraj	mtspr	SPR_MAS3, %r4		/* Set RPN and protection */
180176771Sraj	isync
181176771Sraj	tlbwe
182176771Sraj	isync
183176771Sraj	msync
184176771Sraj
185176771Sraj	/* Switch to the above TLB1[1] mapping */
186186229Sraj	bl	4f
187186229Sraj4:	mflr	%r4
188186229Sraj	rlwinm	%r4, %r4, 0, 8, 31	/* Current offset from kernel load address */
189186229Sraj	rlwinm	%r3, %r3, 0, 0, 19
190186229Sraj	add	%r4, %r4, %r3		/* Convert to kernel virtual address */
191186229Sraj	addi	%r4, %r4, 36
192186229Sraj	li	%r3, PSL_DE		/* Note AS=0 */
193186229Sraj	mtspr   SPR_SRR0, %r4
194186229Sraj	mtspr   SPR_SRR1, %r3
195176771Sraj	rfi
196176771Sraj
197176771Sraj/*
198176771Sraj * Invalidate temp mapping
199176771Sraj */
200222400Smarcel	mr	%r3, %r28
201176771Sraj	bl	tlb1_inval_entry
202176771Sraj
203224611Smarceldone_mapping:
204186229Sraj
205186229Sraj/*
206176771Sraj * Setup a temporary stack
207176771Sraj */
208182198Sraj	lis	%r1, tmpstack@ha
209182198Sraj	addi	%r1, %r1, tmpstack@l
210277334Snwhitehorn	addi	%r1, %r1, (TMPSTACKSZ - 16)
211176771Sraj
212176771Sraj/*
213186289Sraj * Initialise exception vector offsets
214176771Sraj */
215176771Sraj	bl	ivor_setup
216176771Sraj
217176771Sraj/*
218186229Sraj * Set up arguments and jump to system initialization code
219176771Sraj */
220222400Smarcel	mr	%r3, %r30
221222400Smarcel	mr	%r4, %r31
222176771Sraj
223236141Sraj	/* Prepare core */
224222400Smarcel	bl	booke_init
225182198Sraj
226186229Sraj	/* Switch to thread0.td_kstack now */
227182198Sraj	mr	%r1, %r3
228182198Sraj	li	%r3, 0
229182198Sraj	stw	%r3, 0(%r1)
230182198Sraj
231186229Sraj	/* Machine independet part, does not return */
232186229Sraj	bl	mi_startup
233186229Sraj	/* NOT REACHED */
234186229Sraj5:	b	5b
235176771Sraj

#ifdef SMP
/************************************************************************/
/* AP Boot page								*/
/************************************************************************/
	.text
	.globl	__boot_page
	.align	12
__boot_page:
	bl	1f

	/* Number of TLB1 entries the BSP prepared for this AP. */
	.globl	bp_ntlb1s
bp_ntlb1s:
	.long	0

	/* TLB1 entry table: 3 words (MAS1, MAS2, MAS3) per entry, 16 max. */
	.globl	bp_tlb1
bp_tlb1:
	.space	4 * 3 * 16

	.globl	bp_tlb1_end
bp_tlb1_end:

/*
 * Initial configuration
 */
1:	mflr	%r31		/* r31 holds the address of bp_ntlb1s */

	/* Set HIDs */
	lis	%r3, HID0_E500_DEFAULT_SET@h
	ori	%r3, %r3, HID0_E500_DEFAULT_SET@l
	mtspr	SPR_HID0, %r3
	isync
	lis	%r3, HID1_E500_DEFAULT_SET@h
	ori	%r3, %r3, HID1_E500_DEFAULT_SET@l
	mtspr	SPR_HID1, %r3
	isync

	/* Enable branch prediction */
	li	%r3, BUCSR_BPEN
	mtspr	SPR_BUCSR, %r3
	isync

	/* Invalidate all entries in TLB0 */
	li	%r3, 0
	bl	tlb_inval_all

/*
 * Find TLB1 entry which is translating us now
 */
	bl	2f
2:	mflr	%r3
	bl	tlb1_find_current	/* the entry number found is in r29 */

	bl	tlb1_inval_all_but_current

/*
 * Create temporary translation in AS=1 and switch to it
 */
	lwz	%r3, 0(%r31)		/* temp entry = slot after BSP's entries */
	bl	tlb1_temp_mapping_as1

	mfmsr	%r3
	ori	%r3, %r3, (PSL_IS | PSL_DS)
	bl	3f
3:	mflr	%r4
	addi	%r4, %r4, 20		/* resume at the insn following the rfi */
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi				/* Switch context */

/*
 * Invalidate initial entry
 */
	mr	%r3, %r29
	bl	tlb1_inval_entry

/*
 * Setup final mappings in TLB1[0..bp_ntlb1s-1] and switch to them,
 * copying the MAS1/MAS2/MAS3 triples the BSP stored at bp_tlb1.
 */
	lwz	%r6, 0(%r31)		/* r6 = bp_ntlb1s */
	addi	%r5, %r31, 4		/* r5 = &bp_tlb1[0] */
	li	%r4, 0			/* r4 = TLB1 entry index */

4:	lis	%r3, MAS0_TLBSEL1@h
	rlwimi	%r3, %r4, 16, 12, 15	/* MAS0[ESEL] = r4 */
	mtspr	SPR_MAS0, %r3
	isync
	lwz	%r3, 0(%r5)
	mtspr	SPR_MAS1, %r3
	isync
	lwz	%r3, 4(%r5)
	mtspr	SPR_MAS2, %r3
	isync
	lwz	%r3, 8(%r5)
	mtspr	SPR_MAS3, %r3
	isync
	tlbwe
	isync
	msync
	addi	%r5, %r5, 12		/* advance to the next 3-word record */
	addi	%r4, %r4, 1
	cmpw	%r4, %r6
	blt	4b

	/* Switch to the final mapping */
	/* NOTE(review): @ha is paired with ori/@l here; @ha is intended for
	 * addi and is off by 0x10000 when the low half-word >= 0x8000 —
	 * confirm __boot_page's link address keeps this safe. */
	lis	%r5, __boot_page@ha
	ori	%r5, %r5, __boot_page@l
	bl	5f
5:	mflr	%r3
	rlwinm	%r3, %r3, 0, 0xfff	/* Offset from boot page start */
	add	%r3, %r3, %r5		/* Make this virtual address */
	addi	%r3, %r3, 32		/* resume at the insn following the rfi */
	li	%r4, 0			/* Note AS=0 */
	mtspr	SPR_SRR0, %r3
	mtspr	SPR_SRR1, %r4
	rfi

/*
 * At this point we're running at virtual addresses KERNBASE and beyond so
 * it's allowed to directly access all locations the kernel was linked
 * against.
 */

/*
 * Invalidate temp mapping
 */
	mr	%r3, %r28
	bl	tlb1_inval_entry

/*
 * Setup a temporary stack
 */
	lis	%r1, tmpstack@ha
	addi	%r1, %r1, tmpstack@l
	addi	%r1, %r1, (TMPSTACKSZ - 16)

/*
 * Initialise exception vector offsets
 */
	bl	ivor_setup

	/*
	 * Assign our pcpu instance
	 */
	lis	%r3, ap_pcpu@h
	ori	%r3, %r3, ap_pcpu@l
	lwz	%r3, 0(%r3)
	mtsprg0	%r3

	bl	pmap_bootstrap_ap

	bl	cpudep_ap_bootstrap
	/* Switch to the idle thread's kstack */
	mr	%r1, %r3

	bl	machdep_ap_bootstrap

	/* NOT REACHED */
6:	b	6b
#endif /* SMP */

/*
 * Invalidate all entries in the given TLB.
 *
 * r3	TLBSEL (0 = TLB0, 1 = TLB1)
 * Clobbers r3; broadcasts the invalidation (tlbivax + tlbsync).
 */
tlb_inval_all:
	rlwinm	%r3, %r3, 3, 0x18	/* TLBSEL */
	ori	%r3, %r3, 0x4		/* INVALL */
	tlbivax	0, %r3
	isync
	msync

	tlbsync
	msync
	blr

/*
 * Expects the address to look up in r3, returns the entry number in r29.
 * Also sets IPROT on the entry found so it survives later invalidations.
 * Clobbers r17.
 *
 * FIXME: the hidden assumption is we are now running in AS=0, but we should
 * retrieve actual AS from MSR[IS|DS] and put it in MAS6[SAS]
 */
tlb1_find_current:
	mfspr	%r17, SPR_PID0
	slwi	%r17, %r17, MAS6_SPID0_SHIFT
	mtspr	SPR_MAS6, %r17		/* search with our PID0 */
	isync
	tlbsx	0, %r3			/* MAS registers <- matching entry */
	mfspr	%r17, SPR_MAS0
	rlwinm	%r29, %r17, 16, 20, 31		/* MAS0[ESEL] -> r29 */

	/* Make sure we have IPROT set on the entry */
	mfspr	%r17, SPR_MAS1
	oris	%r17, %r17, MAS1_IPROT@h
	mtspr	SPR_MAS1, %r17
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Invalidates a single entry in TLB1.
 *
 * r3		ESEL (entry number to invalidate)
 * r4-r5	scratched
 */
tlb1_inval_entry:
	lis	%r4, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r4, %r3, 16, 12, 15	/* Select our entry */
	mtspr	SPR_MAS0, %r4
	isync
	tlbre				/* read the entry into the MAS regs */
	li	%r5, 0			/* MAS1[V] = 0 */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe				/* write it back invalid */
	isync
	msync
	blr

/*
 * Copy the current translation into a new TLB1 entry valid in AS=1.
 *
 * r3		entry of temp translation
 * r29		entry of current translation
 * r28		returns temp entry passed in r3
 * r4-r5	scratched
 */
tlb1_temp_mapping_as1:
	mr	%r28, %r3

	/* Read our current translation */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r3, %r29, 16, 12, 15	/* Select our current entry */
	mtspr	SPR_MAS0, %r3
	isync
	tlbre

	/* Prepare and write temp entry */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r3, %r28, 16, 12, 15	/* Select temp entry */
	mtspr	SPR_MAS0, %r3
	isync
	mfspr	%r5, SPR_MAS1
	li	%r4, 1			/* AS=1 */
	rlwimi	%r5, %r4, 12, 19, 19	/* MAS1[TS] = 1 */
	li	%r4, 0			/* Global mapping, TID=0 */
	rlwimi	%r5, %r4, 16, 8, 15
	oris	%r5, %r5, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Loops over TLB1, invalidates all entries skipping the one which currently
 * maps this code.
 *
 * r29		current entry
 * r3-r5	scratched
 */
tlb1_inval_all_but_current:
	mr	%r6, %r3		/* NOTE(review): r6 is never read below;
					   this copy appears vestigial */
	mfspr	%r3, SPR_TLB1CFG	/* Get number of entries */
	andi.	%r3, %r3, TLBCFG_NENTRY_MASK@l
	li	%r4, 0			/* Start from Entry 0 */
1:	lis	%r5, MAS0_TLBSEL1@h
	rlwimi	%r5, %r4, 16, 12, 15
	mtspr	SPR_MAS0, %r5
	isync
	tlbre
	mfspr	%r5, SPR_MAS1
	cmpw	%r4, %r29		/* our current entry? */
	beq	2f			/* skip it if so */
	rlwinm	%r5, %r5, 0, 2, 31	/* clear VALID and IPROT bits */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
2:	addi	%r4, %r4, 1
	cmpw	%r4, %r3		/* Check if this is the last entry */
	bne	1b
	blr

#ifdef SMP
__boot_page_padding:
	/*
	 * Boot page needs to be exactly 4K, with the last word of this page
	 * acting as the reset vector, so we need to stuff the remainder.
	 * Upon release from holdoff CPU fetches the last word of the boot
	 * page.
	 */
	.space	4092 - (__boot_page_padding - __boot_page)
	b	__boot_page		/* reset vector: the page's last word */
#endif /* SMP */

/************************************************************************/
/* locore subroutines */
/************************************************************************/

/*
 * Program IVPR with the base of the vector code and each IVORn with the
 * offset of its handler in trap_subr.S.  Clobbers r3.
 */
ivor_setup:
	/* Set base address of interrupt handler routines */
	lis	%r3, interrupt_vector_base@h
	mtspr	SPR_IVPR, %r3

	/* Assign interrupt handler routines offsets */
	li	%r3, int_critical_input@l
	mtspr	SPR_IVOR0, %r3
	li	%r3, int_machine_check@l
	mtspr	SPR_IVOR1, %r3
	li	%r3, int_data_storage@l
	mtspr	SPR_IVOR2, %r3
	li	%r3, int_instr_storage@l
	mtspr	SPR_IVOR3, %r3
	li	%r3, int_external_input@l
	mtspr	SPR_IVOR4, %r3
	li	%r3, int_alignment@l
	mtspr	SPR_IVOR5, %r3
	li	%r3, int_program@l
	mtspr	SPR_IVOR6, %r3
	li	%r3, int_syscall@l
	mtspr	SPR_IVOR8, %r3
	li	%r3, int_decrementer@l
	mtspr	SPR_IVOR10, %r3
	li	%r3, int_fixed_interval_timer@l
	mtspr	SPR_IVOR11, %r3
	li	%r3, int_watchdog@l
	mtspr	SPR_IVOR12, %r3
	li	%r3, int_data_tlb_error@l
	mtspr	SPR_IVOR13, %r3
	li	%r3, int_inst_tlb_error@l
	mtspr	SPR_IVOR14, %r3
	li	%r3, int_debug@l
	mtspr	SPR_IVOR15, %r3
	blr

/*
 * void tid_flush(tlbtid_t tid);
 *
 * Invalidate all TLB0 entries which match the given TID. Note this is
 * dedicated for cases when invalidation(s) should NOT be propagated to other
 * CPUs.
 *
 * Global vars tlb0_ways, tlb0_entries_per_way are assumed to have been set up
 * correctly (by tlb0_get_tlbconf()).
 *
 * Runs with interrupts disabled; clobbers r4-r10.
 */
ENTRY(tid_flush)
	cmpwi	%r3, TID_KERNEL
	beq	tid_flush_end	/* don't evict kernel translations */

	/* Number of TLB0 ways */
	lis	%r4, tlb0_ways@h
	ori	%r4, %r4, tlb0_ways@l
	lwz	%r4, 0(%r4)

	/* Number of entries / way */
	lis	%r5, tlb0_entries_per_way@h
	ori	%r5, %r5, tlb0_entries_per_way@l
	lwz	%r5, 0(%r5)

	/* Disable interrupts */
	mfmsr	%r10
	wrteei	0

	li	%r6, 0		/* ways counter */
loop_ways:
	li	%r7, 0		/* entries [per way] counter */
loop_entries:
	/* Select TLB0 and ESEL (way) */
	lis	%r8, MAS0_TLBSEL0@h
	rlwimi	%r8, %r6, 16, 14, 15
	mtspr	SPR_MAS0, %r8
	isync

	/* Select EPN (entry within the way) */
	rlwinm	%r8, %r7, 12, 13, 19
	mtspr	SPR_MAS2, %r8
	isync
	tlbre

	/* Check if valid entry */
	mfspr	%r8, SPR_MAS1
	andis.	%r9, %r8, MAS1_VALID@h
	beq	next_entry	/* invalid entry */

	/* Check if this is our TID */
	rlwinm	%r9, %r8, 16, 24, 31	/* extract MAS1[TID] */

	cmplw	%r9, %r3
	bne	next_entry	/* not our TID */

	/* Clear VALID bit */
	rlwinm	%r8, %r8, 0, 1, 31
	mtspr	SPR_MAS1, %r8
	isync
	tlbwe
	isync
	msync

next_entry:
	addi	%r7, %r7, 1
	cmpw	%r7, %r5
	bne	loop_entries

	/* Next way */
	addi	%r6, %r6, 1
	cmpw	%r6, %r4
	bne	loop_ways

	/* Restore MSR (possibly re-enable interrupts) */
	mtmsr	%r10
	isync

tid_flush_end:
	blr

/*
 * Cache disable/enable/inval sequences according
 * to section 2.16 of E500CORE RM.
 */
ENTRY(dcache_inval)
	/* Invalidate d-cache */
	mfspr	%r3, SPR_L1CSR0
	ori	%r3, %r3, (L1CSR0_DCFI | L1CSR0_DCLFR)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR0		/* wait for the flash invalidate */
	andi.	%r3, %r3, L1CSR0_DCFI	/* bit to self-clear */
	bne	1b
	blr

ENTRY(dcache_disable)
	/* Disable d-cache by clearing L1CSR0[DCE] */
	mfspr	%r3, SPR_L1CSR0
	li	%r4, L1CSR0_DCE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(dcache_enable)
	/* Enable d-cache (with parity enable, L1CSR0[DCPE|DCE]) */
	mfspr	%r3, SPR_L1CSR0
	oris	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@h
	ori	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(icache_inval)
	/* Invalidate i-cache */
	mfspr	%r3, SPR_L1CSR1
	ori	%r3, %r3, (L1CSR1_ICFI | L1CSR1_ICLFR)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR1		/* wait for the flash invalidate */
	andi.	%r3, %r3, L1CSR1_ICFI	/* bit to self-clear */
	bne	1b
	blr

ENTRY(icache_disable)
	/* Disable i-cache by clearing L1CSR1[ICE] */
	mfspr	%r3, SPR_L1CSR1
	li	%r4, L1CSR1_ICE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

ENTRY(icache_enable)
	/* Enable i-cache (with parity enable, L1CSR1[ICPE|ICE]) */
	mfspr	%r3, SPR_L1CSR1
	oris	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@h
	ori	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

/*
 * int setfault()
 *
 * Similar to setjmp to setup for handling faults on accesses to user memory.
 * Any routine using this may only call bcopy, either the form below,
 * or the (currently used) C code optimized, so it doesn't use any non-volatile
 * registers.
 *
 * r3 = fault buffer; stores LR, r1, r2, CR, CTR, XER and r13-r31 into it,
 * records the buffer in the PCB, and returns 0 (FALSE).
 */
	.globl	setfault
setfault:
	mflr	%r0
	mfsprg0	%r4			/* NOTE(review): overwritten by the
					   next load — appears vestigial */
	lwz	%r4, TD_PCB(%r2)	/* r2 = curthread */
	stw	%r3, PCB_ONFAULT(%r4)
	mfcr	%r10
	mfctr	%r11
	mfxer	%r12
	stw	%r0, 0(%r3)
	stw	%r1, 4(%r3)
	stw	%r2, 8(%r3)
	stmw	%r10, 12(%r3)		/* store CR, CTR, XER, [r13 .. r31] */
	li	%r3, 0			/* return FALSE */
	blr

/************************************************************************/
/* Data section								*/
/************************************************************************/
	.data
	.align	4
tmpstack:
	.space	TMPSTACKSZ
tmpstackbound:
	.space 10240	/* XXX: this really should not be necessary */

/*
 * Compiled KERNBASE location
 */
	.globl	kernbase
	.set	kernbase, KERNBASE

#include <powerpc/booke/trap_subr.S>