/*-
 * Copyright (C) 2007-2009 Semihalf, Rafal Jaworowski <raj@semihalf.com>
 * Copyright (C) 2006 Semihalf, Marian Balakowicz <m8@semihalf.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN
 * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: stable/11/sys/powerpc/booke/locore.S 330446 2018-03-05 06:59:30Z eadler $
 */

#include "assym.s"

#include "opt_hwpmc_hooks.h"

#include <machine/asm.h>
#include <machine/hid.h>
#include <machine/param.h>
#include <machine/spr.h>
#include <machine/pte.h>
#include <machine/trap.h>
#include <machine/vmparam.h>
#include <machine/tlb.h>

#define TMPSTACKSZ	16384

	.text
	.globl	btext
btext:

/*
 * This symbol is here for the benefit of kvm_mkdb, and is supposed to
 * mark the start of kernel text.
 */
	.globl	kernel_text
kernel_text:

/*
 * Startup entry.  Note, this must be the first thing in the text segment!
 */
	.text
	.globl	__start
__start:

/*
 * Assumptions on the boot loader:
 *  - System memory starts from physical address 0
 *  - It's mapped by a single TLB1 entry
 *  - TLB1 mapping is 1:1 pa to va
 *  - Kernel is loaded at a 64MB boundary
 *  - All PID registers are set to the same value
 *  - CPU is running in AS=0
 *
 * Register contents provided by loader(8):
 *	r1	: stack pointer
 *	r3	: metadata pointer
 *
 * We rearrange the TLB1 layout as follows:
 *  - Find the TLB1 entry we started in
 *  - Make sure it's protected, invalidate other entries
 *  - Create a temp entry in the second AS (make sure it's not TLB1[1])
 *  - Switch to the temp mapping
 *  - Map 64MB of RAM in TLB1[1]
 *  - Use AS=1, set EPN to KERNBASE and RPN to the kernel load address
 *  - Switch to the TLB1[1] mapping
 *  - Invalidate the temp mapping
 *
 * locore register use:
 *	r1	: stack pointer
 *	r2	: trace pointer (AP only, for early diagnostics)
 *	r3-r27	: scratch registers
 *	r28	: temp TLB1 entry
 *	r29	: initial TLB1 entry we started in
 *	r30-r31	: arguments (metadata pointer)
 */
93176771Sraj
94176771Sraj/*
95222400Smarcel * Keep arguments in r30 & r31 for later use.
96176771Sraj */
97222400Smarcel	mr	%r30, %r3
98222400Smarcel	mr	%r31, %r4
99176771Sraj
100176771Sraj/*
101176771Sraj * Initial cleanup
102176771Sraj */
103186229Sraj	li	%r3, PSL_DE	/* Keep debug exceptions for CodeWarrior. */
104186229Sraj	mtmsr	%r3
105176771Sraj	isync
106176771Sraj
/*
 * Initial HIDs configuration
 */
1:
	mfpvr	%r3
	rlwinm	%r3, %r3, 16, 16, 31

	lis	%r4, HID0_E500_DEFAULT_SET@h
	ori	%r4, %r4, HID0_E500_DEFAULT_SET@l

	/* Check for e500mc and e5500 */
	cmpli	0, 0, %r3, FSL_E500mc
	bne	2f

	lis	%r4, HID0_E500MC_DEFAULT_SET@h
	ori	%r4, %r4, HID0_E500MC_DEFAULT_SET@l
	b	3f
2:
	cmpli	0, 0, %r3, FSL_E5500
	bne	3f

	lis	%r4, HID0_E5500_DEFAULT_SET@h
	ori	%r4, %r4, HID0_E5500_DEFAULT_SET@l

3:
	mtspr	SPR_HID0, %r4
	isync

/*
 * E500mc and E5500 do not have a HID1 register, so skip HID1 setup on
 * these cores.
 */
	cmpli	0, 0, %r3, FSL_E500mc
	beq	1f
	cmpli	0, 0, %r3, FSL_E5500
	beq	1f

	lis	%r3, HID1_E500_DEFAULT_SET@h
	ori	%r3, %r3, HID1_E500_DEFAULT_SET@l
	mtspr	SPR_HID1, %r3
	isync
1:
	/* Invalidate all entries in TLB0 */
	li	%r3, 0
	bl	tlb_inval_all

	cmpwi	%r30, 0
	beq	done_mapping

/*
 * Locate the TLB1 entry that maps this code
 */
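/*
 * The "bl 1f; 1: mflr" pair below is the usual position-independent
 * idiom for discovering the current PC: bl deposits the address of the
 * following instruction in LR.  We may be executing at an arbitrary
 * physical address at this point, so this is the only reliable way to
 * learn which address must be looked up in TLB1.
 */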
	bl	1f
1:	mflr	%r3
	bl	tlb1_find_current	/* the entry found is returned in r29 */

	bl	tlb1_inval_all_but_current

/*
 * Create temporary mapping in AS=1 and switch to it
 */
	bl	tlb1_temp_mapping_as1

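/*
 * rfi loads the PC from SRR0 and the MSR from SRR1.  SRR1 gets our
 * current MSR with IS/DS set, so after the rfi we keep executing this
 * same code, but translated through address space 1.  The "addi ..., 20"
 * points SRR0 at the instruction just past the rfi: 5 instructions
 * (20 bytes) beyond the mflr whose address the bl captured.
 */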
	mfmsr	%r3
	ori	%r3, %r3, (PSL_IS | PSL_DS)
	bl	2f
2:	mflr	%r4
	addi	%r4, %r4, 20
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi				/* Switch context */

/*
 * Invalidate initial entry
 */
	mr	%r3, %r29
	bl	tlb1_inval_entry

/*
 * Setup final mapping in TLB1[1] and switch to it
 */
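/*
 * TLB entries are written through the MMU assist (MAS) registers:
 * MAS0 selects the TLB array and entry slot, MAS1 carries the valid,
 * protect and page-size fields, MAS2 the effective page number plus
 * WIMGE attributes, and MAS3 the real page number plus access
 * permissions.  tlbwe then commits the staged entry to the TLB.
 */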
	/* Final kernel mapping, map in 64 MB of RAM */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	li	%r4, 0			/* Entry 0 */
	rlwimi	%r3, %r4, 16, 10, 15
	mtspr	SPR_MAS0, %r3
	isync

	li	%r3, (TLB_SIZE_64M << MAS1_TSIZE_SHIFT)@l
	oris	%r3, %r3, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r3		/* note TS was not filled, so it's TS=0 */
	isync

	lis	%r3, KERNBASE@h
	ori	%r3, %r3, KERNBASE@l	/* EPN = KERNBASE */
#ifdef SMP
	ori	%r3, %r3, (_TLB_ENTRY_SHARED | MAS2_M)@l /* WIMGE = 0b00100 */
#endif
	mtspr	SPR_MAS2, %r3
	isync

	/* Discover phys load address */
	bl	3f
3:	mflr	%r4			/* Use current address */
	rlwinm	%r4, %r4, 0, 0, 5	/* 64MB alignment mask */
	ori	%r4, %r4, (MAS3_SX | MAS3_SW | MAS3_SR)@l
	mtspr	SPR_MAS3, %r4		/* Set RPN and protection */
	isync
	bl	zero_mas7
	bl	zero_mas8
	tlbwe
	isync
	msync

	/* Switch to the above TLB1[1] mapping */
	bl	4f
4:	mflr	%r4
	rlwinm	%r4, %r4, 0, 8, 31	/* Current offset from kernel load address */
	rlwinm	%r3, %r3, 0, 0, 19
	add	%r4, %r4, %r3		/* Convert to kernel virtual address */
	addi	%r4, %r4, 36
	li	%r3, PSL_DE		/* Note AS=0 */
	mtspr   SPR_SRR0, %r4
	mtspr   SPR_SRR1, %r3
	rfi

/*
 * Invalidate temp mapping
 */
	mr	%r3, %r28
	bl	tlb1_inval_entry

done_mapping:

/*
 * Setup a temporary stack
 */
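/*
 * ".long tmpstack-." assembles to the distance from that word to
 * tmpstack, a link-time constant.  Adding it to the word's run-time
 * address (recovered via bl/mflr) yields tmpstack's run-time address,
 * which works whether or not the kernel runs at its linked address.
 */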
	bl	1f
	.long tmpstack-.
1:	mflr	%r1
	lwz	%r2,0(%r1)
	add	%r1,%r1,%r2
	addi	%r1, %r1, (TMPSTACKSZ - 16)

/*
 * Relocate kernel
 */
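/*
 * Self-relocation: compute the run-time address of _DYNAMIC and the
 * relocation base (run-time minus link-time address), then let
 * elf_reloc_self() process the dynamic relocations.  got[0] holds the
 * link-time address of _DYNAMIC, so subtracting it from the run-time
 * address gives the displacement the kernel was actually loaded at.
 */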
	bl	1f
	.long	_DYNAMIC-.
	.long	_GLOBAL_OFFSET_TABLE_-.
1:	mflr	%r5
	lwz	%r3,0(%r5)	/* _DYNAMIC in %r3 */
	add	%r3,%r3,%r5
	lwz	%r4,4(%r5)	/* GOT pointer */
	add	%r4,%r4,%r5
	lwz	%r4,4(%r4)	/* got[0] is _DYNAMIC link addr */
	subf	%r4,%r4,%r3	/* subtract to calculate relocbase */
	bl	elf_reloc_self

/*
 * Initialise exception vector offsets
 */
	bl	ivor_setup

/*
 * Set up arguments and jump to system initialization code
 */
	mr	%r3, %r30
	mr	%r4, %r31

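	/*
	 * booke_init() receives the saved argument pair (r30/r31) and
	 * returns the top of thread0's kernel stack in r3.  The zero
	 * stored at 0(r1) below terminates the stack-frame back chain,
	 * so unwinders stop here.
	 */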
	/* Prepare core */
	bl	booke_init

	/* Switch to thread0.td_kstack now */
	mr	%r1, %r3
	li	%r3, 0
	stw	%r3, 0(%r1)

	/* Machine independent part, does not return */
	bl	mi_startup
	/* NOT REACHED */
5:	b	5b


#ifdef SMP
/************************************************************************/
/* AP Boot page */
/************************************************************************/
	.text
	.globl	__boot_page
	.align	12
__boot_page:
	bl	1f

	.globl	bp_trace
bp_trace:
	.long	0

	.globl	bp_kernload
bp_kernload:
	.long	0
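
/*
 * ".align 12" places __boot_page on a 4KB boundary.  bp_trace and
 * bp_kernload live inside the boot page itself so an AP can reach them
 * while running with only this page mapped; bp_kernload is filled in
 * with the kernel's physical load address by the BSP (platform code)
 * before the APs are released from holdoff.
 */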

/*
 * Initial configuration
 */
1:
	mflr	%r31		/* r31 holds the address of bp_trace */

	/* Set HIDs */
	mfpvr	%r3
	rlwinm	%r3, %r3, 16, 16, 31

	/* HID0 for E500 is default */
	lis	%r4, HID0_E500_DEFAULT_SET@h
	ori	%r4, %r4, HID0_E500_DEFAULT_SET@l

	cmpli	0, 0, %r3, FSL_E500mc
	bne	2f
	lis	%r4, HID0_E500MC_DEFAULT_SET@h
	ori	%r4, %r4, HID0_E500MC_DEFAULT_SET@l
	b	3f
2:
	cmpli	0, 0, %r3, FSL_E5500
	bne	3f
	lis	%r4, HID0_E5500_DEFAULT_SET@h
	ori	%r4, %r4, HID0_E5500_DEFAULT_SET@l
3:
	mtspr	SPR_HID0, %r4
	isync

	/* Enable branch prediction */
	li	%r3, BUCSR_BPEN
	mtspr	SPR_BUCSR, %r3
	isync

	/* Invalidate all entries in TLB0 */
	li	%r3, 0
	bl	tlb_inval_all

/*
 * Find TLB1 entry which is translating us now
 */
	bl	2f
2:	mflr	%r3
	bl	tlb1_find_current	/* the entry number found is in r29 */

	bl	tlb1_inval_all_but_current

/*
 * Create temporary translation in AS=1 and switch to it
 */

	bl	tlb1_temp_mapping_as1

	mfmsr	%r3
	ori	%r3, %r3, (PSL_IS | PSL_DS)
	bl	3f
3:	mflr	%r4
	addi	%r4, %r4, 20
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi				/* Switch context */

/*
 * Invalidate initial entry
 */
	mr	%r3, %r29
	bl	tlb1_inval_entry

/*
 * Setup final mapping in TLB1[1] and switch to it
 */
	/* Final kernel mapping, map in 64 MB of RAM */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	li	%r4, 0			/* Entry 0 */
	rlwimi	%r3, %r4, 16, 4, 15
	mtspr	SPR_MAS0, %r3
	isync

	li	%r3, (TLB_SIZE_64M << MAS1_TSIZE_SHIFT)@l
	oris	%r3, %r3, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r3		/* note TS was not filled, so it's TS=0 */
	isync

	lis	%r3, KERNBASE@h
	ori	%r3, %r3, KERNBASE@l	/* EPN = KERNBASE */
	ori	%r3, %r3, (_TLB_ENTRY_SHARED | MAS2_M)@l /* WIMGE = 0b00100 */
	mtspr	SPR_MAS2, %r3
	isync

	/* Retrieve kernel load [physical] address from bp_kernload */
	bl	4f
	.long	bp_kernload
	.long	__boot_page
4:	mflr	%r3
	lwz	%r4, 0(%r3)
	lwz	%r5, 4(%r3)
	rlwinm	%r3, %r3, 0, 0, 19
	sub	%r4, %r4, %r5	/* offset of bp_kernload within __boot_page */
	lwzx	%r3, %r4, %r3
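	/*
	 * The two .long words above hold link-time addresses, so their
	 * difference (r4) is bp_kernload's offset within the boot page.
	 * Masking the current PC with 0xfffff000 (rlwinm ..., 0, 0, 19)
	 * rounds down to the 4KB page start, i.e. __boot_page's run-time
	 * address, and lwzx fetches bp_kernload's contents from there.
	 */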

	/* Set RPN and protection */
	ori	%r3, %r3, (MAS3_SX | MAS3_SW | MAS3_SR)@l
	mtspr	SPR_MAS3, %r3
	isync
	bl	zero_mas7
	bl	zero_mas8
	tlbwe
	isync
	msync

	/* Switch to the final mapping */
	bl	5f
5:	mflr	%r3
	rlwinm	%r3, %r3, 0, 0xfff	/* Offset from boot page start */
	add	%r3, %r3, %r5		/* Make this virtual address */
	addi	%r3, %r3, 32
	li	%r4, 0			/* Note AS=0 */
	mtspr	SPR_SRR0, %r3
	mtspr	SPR_SRR1, %r4
	rfi

/*
 * At this point we're running at virtual addresses KERNBASE and beyond, so
 * we can directly access all locations the kernel was linked against.
 */

/*
 * Invalidate temp mapping
 */
	mr	%r3, %r28
	bl	tlb1_inval_entry

/*
 * Setup a temporary stack
 */
	bl	1f
	.long tmpstack-.
1:	mflr	%r1
	lwz	%r2,0(%r1)
	add	%r1,%r1,%r2
	stw	%r1, 0(%r1)
	addi	%r1, %r1, (TMPSTACKSZ - 16)

/*
 * Initialise exception vector offsets
 */
	bl	ivor_setup

	/*
	 * Assign our pcpu instance
	 */
	bl	1f
	.long ap_pcpu-.
1:	mflr	%r4
	lwz	%r3, 0(%r4)
	add	%r3, %r3, %r4
	lwz	%r3, 0(%r3)
	mtsprg0	%r3

	bl	pmap_bootstrap_ap

	bl	cpudep_ap_bootstrap
	/* Switch to the idle thread's kstack */
	mr	%r1, %r3

	bl	machdep_ap_bootstrap

	/* NOT REACHED */
6:	b	6b
#endif /* SMP */


#if defined(BOOKE_E500)
/*
 * Invalidate all entries in the given TLB.
 *
 * r3	TLBSEL
 */
tlb_inval_all:
	rlwinm	%r3, %r3, 3, (1 << 3)	/* TLBSEL */
	ori	%r3, %r3, (1 << 2)	/* INVALL */
	tlbivax	0, %r3
	isync
	msync

	tlbsync
	msync
	blr

/*
 * Expects the address to look up in r3; returns the entry number in r29.
 *
 * FIXME: the hidden assumption is we are now running in AS=0, but we should
 * retrieve the actual AS from MSR[IS|DS] and put it in MAS6[SAS].
 */
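/*
 * tlbsx searches the TLB for the effective address in r3, using the
 * search PID/space staged in MAS6, and on a hit loads the matching
 * entry into the MAS registers.  The rlwinm below then extracts
 * MAS0[ESEL], the index of the entry that hit.
 */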
tlb1_find_current:
	mfspr	%r17, SPR_PID0
	slwi	%r17, %r17, MAS6_SPID0_SHIFT
	mtspr	SPR_MAS6, %r17
	isync
	tlbsx	0, %r3
	mfspr	%r17, SPR_MAS0
	rlwinm	%r29, %r17, 16, 26, 31		/* MAS0[ESEL] -> r29 */

	/* Make sure we have IPROT set on the entry */
	mfspr	%r17, SPR_MAS1
	oris	%r17, %r17, MAS1_IPROT@h
	mtspr	SPR_MAS1, %r17
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Invalidates a single entry in TLB1.
 *
 * r3		ESEL
 * r4-r5	scratched
 */
tlb1_inval_entry:
	lis	%r4, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r4, %r3, 16, 10, 15	/* Select our entry */
	mtspr	SPR_MAS0, %r4
	isync
	tlbre
	li	%r5, 0			/* MAS1[V] = 0 */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * r29		current entry number
 * r28		returned temp entry
 * r3-r5	scratched
 */
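/*
 * Strategy: tlbre reads our current TLB1 entry back into the MAS
 * registers, then a copy of it is written one slot higher (ESEL+1)
 * with TS=1 and TID=0, giving a global AS=1 alias of the mapping we
 * are running from.  LR is saved around the zero_mas7/zero_mas8 calls
 * because they are reached with bl and would otherwise clobber it.
 */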
tlb1_temp_mapping_as1:
	/* Read our current translation */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r3, %r29, 16, 10, 15	/* Select our current entry */
	mtspr	SPR_MAS0, %r3
	isync
	tlbre

	/*
	 * Prepare and write temp entry
	 *
	 * FIXME: this is not robust against overflow, i.e. when the current
	 * entry is the last in TLB1.
	 */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	addi	%r28, %r29, 1		/* Use next entry. */
	rlwimi	%r3, %r28, 16, 10, 15	/* Select temp entry */
	mtspr	SPR_MAS0, %r3
	isync
	mfspr	%r5, SPR_MAS1
	li	%r4, 1			/* AS=1 */
	rlwimi	%r5, %r4, 12, 19, 19
	li	%r4, 0			/* Global mapping, TID=0 */
	rlwimi	%r5, %r4, 16, 8, 15
	oris	%r5, %r5, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r5
	isync
	mflr	%r3
	bl	zero_mas7
	bl	zero_mas8
	mtlr	%r3
	tlbwe
	isync
	msync
	blr

/*
 * Loops over TLB1 and invalidates all entries, skipping the one which
 * currently maps this code.
 *
 * r29		current entry
 * r3-r5	scratched
 */
tlb1_inval_all_but_current:
	mr	%r6, %r3
	mfspr	%r3, SPR_TLB1CFG	/* Get number of entries */
	andi.	%r3, %r3, TLBCFG_NENTRY_MASK@l
	li	%r4, 0			/* Start from Entry 0 */
1:	lis	%r5, MAS0_TLBSEL1@h
	rlwimi	%r5, %r4, 16, 10, 15
	mtspr	SPR_MAS0, %r5
	isync
	tlbre
	mfspr	%r5, SPR_MAS1
	cmpw	%r4, %r29		/* our current entry? */
	beq	2f
	rlwinm	%r5, %r5, 0, 2, 31	/* clear VALID and IPROT bits */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
2:	addi	%r4, %r4, 1
	cmpw	%r4, %r3		/* Check if this is the last entry */
	bne	1b
	blr

/*
 * MAS7 and MAS8 conditional zeroing.
 */
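/*
 * MAS7 carries the upper bits of the real page number on cores with
 * more than 32 bits of physical address and is not implemented on
 * e500v1; MAS8 exists only on the hypervisor-capable e500mc/e5500.
 * Zeroing them (where present) keeps stale values from a previous
 * tlbre/tlbsx out of the entry written by the next tlbwe.
 */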
.globl zero_mas7
zero_mas7:
	mfpvr	%r20
	rlwinm	%r20, %r20, 16, 16, 31
	cmpli	0, 0, %r20, FSL_E500v1
	beq	1f

	li	%r20, 0
	mtspr	SPR_MAS7, %r20
	isync
1:
	blr

.globl zero_mas8
zero_mas8:
	mfpvr	%r20
	rlwinm	%r20, %r20, 16, 16, 31
	cmpli	0, 0, %r20, FSL_E500mc
	beq	1f
	cmpli	0, 0, %r20, FSL_E5500
	beq	1f

	blr
1:
	li	%r20, 0
	mtspr	SPR_MAS8, %r20
	isync
	blr
#endif

#ifdef SMP
.globl __boot_tlb1
	/*
	 * The __boot_tlb1 table is used to hold BSP TLB1 entries
	 * marked with the _TLB_ENTRY_SHARED flag during AP bootstrap.
	 * The BSP fills in the table in tlb_ap_prep().  Next, the AP
	 * loads its contents into the TLB1 hardware in pmap_bootstrap_ap().
	 */
__boot_tlb1:
	.space TLB1_MAX_ENTRIES * TLB_ENTRY_SIZE

__boot_page_padding:
	/*
	 * The boot page needs to be exactly 4K, with the last word of the
	 * page acting as the reset vector, so we pad out the remainder.
	 * Upon release from holdoff, the CPU fetches the last word of the
	 * boot page.
	 */
	.space	4092 - (__boot_page_padding - __boot_page)
	b	__boot_page
#endif /* SMP */

/************************************************************************/
/* locore subroutines */
/************************************************************************/

/*
 * Cache disable/enable/inval sequences according
 * to section 2.16 of E500CORE RM.
 */
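/*
 * The invalidate routines follow the documented pattern: set the flash
 * invalidate bit (DCFI/ICFI) together with the lock flash-clear bit
 * (DCLFR/ICLFR), then poll until hardware clears the flash-invalidate
 * bit to signal completion.
 */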
ENTRY(dcache_inval)
	/* Invalidate d-cache */
	mfspr	%r3, SPR_L1CSR0
	ori	%r3, %r3, (L1CSR0_DCFI | L1CSR0_DCLFR)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR0
	andi.	%r3, %r3, L1CSR0_DCFI
	bne	1b
	blr

ENTRY(dcache_disable)
	/* Disable d-cache */
	mfspr	%r3, SPR_L1CSR0
	li	%r4, L1CSR0_DCE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(dcache_enable)
	/* Enable d-cache */
	mfspr	%r3, SPR_L1CSR0
	oris	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@h
	ori	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(icache_inval)
	/* Invalidate i-cache */
	mfspr	%r3, SPR_L1CSR1
	ori	%r3, %r3, (L1CSR1_ICFI | L1CSR1_ICLFR)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR1
	andi.	%r3, %r3, L1CSR1_ICFI
	bne	1b
	blr

ENTRY(icache_disable)
	/* Disable i-cache */
	mfspr	%r3, SPR_L1CSR1
	li	%r4, L1CSR1_ICE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

ENTRY(icache_enable)
	/* Enable i-cache */
	mfspr	%r3, SPR_L1CSR1
	oris	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@h
	ori	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

/*
 * L2 cache disable/enable/inval sequences for E500mc.
 */

ENTRY(l2cache_inval)
	mfspr	%r3, SPR_L2CSR0
	oris	%r3, %r3, (L2CSR0_L2FI | L2CSR0_L2LFC)@h
	ori	%r3, %r3, (L2CSR0_L2FI | L2CSR0_L2LFC)@l
	isync
	mtspr	SPR_L2CSR0, %r3
	isync
1:	mfspr	%r3, SPR_L2CSR0
	andis.	%r3, %r3, L2CSR0_L2FI@h
	bne	1b
	blr

ENTRY(l2cache_enable)
	mfspr	%r3, SPR_L2CSR0
	oris	%r3, %r3, (L2CSR0_L2E | L2CSR0_L2PE)@h
	isync
	mtspr	SPR_L2CSR0, %r3
	isync
	blr

/*
 * Branch predictor setup.
 */
ENTRY(bpred_enable)
	mfspr	%r3, SPR_BUCSR
	ori	%r3, %r3, BUCSR_BBFI
	isync
	mtspr	SPR_BUCSR, %r3
	isync
	ori	%r3, %r3, BUCSR_BPEN
	isync
	mtspr	SPR_BUCSR, %r3
	isync
	blr

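/*
 * Erratum workaround: perform a single store from inside two locked
 * I-cache lines, bracketed by roughly one-million-tick timebase delays,
 * so the store is not accompanied by any other instruction fetches or
 * memory traffic while it completes.
 */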
ENTRY(dataloss_erratum_access)
	/* Lock two cache lines into I-Cache */
	sync
	mfspr	%r11, SPR_L1CSR1
	rlwinm	%r11, %r11, 0, ~L1CSR1_ICUL
	sync
	isync
	mtspr	SPR_L1CSR1, %r11
	isync

	lis	%r8, 2f@h
	ori	%r8, %r8, 2f@l
	icbtls	0, 0, %r8
	addi	%r9, %r8, 64

	/* Poll, re-reading L1CSR1 each pass, until ICUL (unable to lock) clears */
	sync
3:	mfspr	%r11, SPR_L1CSR1
	andi.	%r11, %r11, L1CSR1_ICUL
	bne	3b

	icbtls	0, 0, %r9

	sync
3:	mfspr	%r11, SPR_L1CSR1
	andi.	%r11, %r11, L1CSR1_ICUL
	bne	3b

	b	2f
	.align	6
	/* Inside a locked cacheline, wait a while, write, then wait a while */
2:	sync

	mfspr	%r5, TBR_TBL
4:	addis	%r11, %r5, 0x100000@h	/* wait around one million timebase ticks */
	mfspr	%r5, TBR_TBL
	subf.	%r5, %r5, %r11
	bgt	4b

	stw	%r4, 0(%r3)

	mfspr	%r5, TBR_TBL
4:	addis	%r11, %r5, 0x100000@h	/* wait around one million timebase ticks */
	mfspr	%r5, TBR_TBL
	subf.	%r5, %r5, %r11
	bgt	4b

	sync

	/*
	 * Fill out the rest of this cache line and the next with nops,
	 * to ensure that nothing outside the locked area will be
	 * fetched due to a branch.
	 */
	.rept 19
	nop
	.endr

	icblc	0, 0, %r8
	icblc	0, 0, %r9

	blr

/************************************************************************/
/* Data section								*/
/************************************************************************/
	.data
	.align 3
GLOBAL(__startkernel)
	.long	begin
GLOBAL(__endkernel)
	.long	end
	.align	4
tmpstack:
	.space	TMPSTACKSZ
tmpstackbound:
	.space 10240	/* XXX: this really should not be necessary */

/*
 * Compiled KERNBASE locations
 */
	.globl	kernbase
	.set	kernbase, KERNBASE

#include <powerpc/booke/trap_subr.S>