/*-
 * Copyright (C) 2007-2009 Semihalf, Rafal Jaworowski <raj@semihalf.com>
 * Copyright (C) 2006 Semihalf, Marian Balakowicz <m8@semihalf.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:

--- 9 unchanged lines hidden ---

 * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/powerpc/booke/locore.S 222400 2011-05-28 04:10:44Z marcel $
 */

#include "assym.s"

#include <machine/asm.h>
#include <machine/hid.h>
#include <machine/param.h>
#include <machine/spr.h>

--- 43 unchanged lines hidden ---

 * - map 16MB of RAM in TLB1[1]
 * - use AS=1, set EPN to KERNBASE and RPN to kernel load address
 * - switch to TLB1[1] mapping
 * - invalidate temp mapping
 *
 * locore register use:
 * r1	: stack pointer
 * r2	: trace pointer (AP only, for early diagnostics)
 * r3-r26 : scratch registers
 * r27	: kernload
 * r28	: temp TLB1 entry
 * r29	: initial TLB1 entry we started in
 * r30-r31 : arguments (metadata pointer)
 */

/*
 * Keep arguments in r30 & r31 for later use.
 */
	mr	%r30, %r3
	mr	%r31, %r4

/*
 * Initial cleanup
 */
	li	%r3, PSL_DE	/* Keep debug exceptions for CodeWarrior. */
	mtmsr	%r3
	isync

--- 10 unchanged lines hidden ---

	li	%r3, 0
	bl	tlb_inval_all

/*
 * Locate the TLB1 entry that maps this code
 */
	bl	1f
1:	mflr	%r3
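	/*
	 * The bl/mflr pair above leaves the run-time address of label 1 in
	 * %r3; tlb1_find_current looks that address up with tlbsx to learn
	 * which TLB1 entry we are currently executing through.
	 */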
	bl	tlb1_find_current	/* the entry found is returned in r29 */

	bl	tlb1_inval_all_but_current
/*
 * Create temporary mapping in AS=1 and switch to it
 */
	bl	tlb1_temp_mapping_as1

	mfmsr	%r3
	ori	%r3, %r3, (PSL_IS | PSL_DS)
	bl	2f
2:	mflr	%r4
	addi	%r4, %r4, 20
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi				/* Switch context */
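	/*
	 * SRR0 was set to the address of label 2 plus 20 bytes (5
	 * instructions), i.e. the first instruction after the rfi, so
	 * execution resumes right below with MSR[IS|DS] set (AS=1).
	 */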
/*
 * Invalidate initial entry
 */
	mr	%r3, %r29
	bl	tlb1_inval_entry

/*
 * Setup final mapping in TLB1[1] and switch to it
 */
	/* Final kernel mapping, map in 16 MB of RAM */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	li	%r4, 0			/* Entry 0 */

--- 13 unchanged lines hidden ---

#endif
	mtspr	SPR_MAS2, %r3
	isync

	/* Discover phys load address */
	bl	3f
3:	mflr	%r4			/* Use current address */
	rlwinm	%r4, %r4, 0, 0, 7	/* 16MB alignment mask */
	mr	%r27, %r4		/* Keep kernel load address */
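	/*
	 * The rlwinm keeps only the top 8 address bits, i.e. it rounds the
	 * current PC down to a 16MB boundary, which is the physical address
	 * the kernel was loaded at (saved in %r27 as kernload).
	 */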
	ori	%r4, %r4, (MAS3_SX | MAS3_SW | MAS3_SR)@l
	mtspr	SPR_MAS3, %r4		/* Set RPN and protection */
	isync
	tlbwe
	isync
	msync

	/* Switch to the above TLB1[1] mapping */

--- 6 unchanged lines hidden ---

	li	%r3, PSL_DE		/* Note AS=0 */
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi
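	/*
	 * From here on we execute in AS=0 through the new TLB1 kernel
	 * entry, i.e. at the kernel's linked (KERNBASE-based) virtual
	 * addresses.
	 */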
/*
 * Invalidate temp mapping
 */
	mr	%r3, %r28
	bl	tlb1_inval_entry

/*
 * Save kernel load address for later use.
 */
	lis	%r3, kernload@ha
	addi	%r3, %r3, kernload@l
	stw	%r27, 0(%r3)
#ifdef SMP
	/*
	 * APs need a separate copy of kernload info within the __boot_page
	 * area so they can access this value very early, before their TLBs
	 * are fully set up and the kernload global location is available.
	 */
	lis	%r3, kernload_ap@ha
	addi	%r3, %r3, kernload_ap@l
	stw	%r27, 0(%r3)
	msync
#endif

/*
 * Setup a temporary stack
 */
	lis	%r1, tmpstack@ha
	addi	%r1, %r1, tmpstack@l
	addi	%r1, %r1, (TMPSTACKSZ - 8)
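	/*
	 * %r1 now points 8 bytes below the top of the tmpstack area (the
	 * stack grows down); this carries us until booke_init() hands back
	 * thread0's kernel stack.
	 */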
/*
 * Initialise exception vector offsets
 */
	bl	ivor_setup

/*
 * Set up arguments and jump to system initialization code
 */
	mr	%r3, %r30
	mr	%r4, %r31

	/* Prepare e500 core */
	bl	booke_init

	/* Switch to thread0.td_kstack now */
	mr	%r1, %r3
	li	%r3, 0
	stw	%r3, 0(%r1)
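	/*
	 * booke_init() returned the new stack pointer (top of thread0's
	 * kernel stack) in %r3; storing zero there terminates the stack
	 * frame back-chain for stack walkers.
	 */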

	/* Machine independent part, does not return */
	bl	mi_startup

--- 37 unchanged lines hidden ---

	li	%r3, 0
	bl	tlb_inval_all

/*
 * Find TLB1 entry which is translating us now
 */
	bl	2f
2:	mflr	%r3
	bl	tlb1_find_current	/* the entry number found is in r29 */

	bl	tlb1_inval_all_but_current
/*
 * Create temporary translation in AS=1 and switch to it
 */
	bl	tlb1_temp_mapping_as1

	mfmsr	%r3
	ori	%r3, %r3, (PSL_IS | PSL_DS)
	bl	3f
3:	mflr	%r4
	addi	%r4, %r4, 20
	mtspr	SPR_SRR0, %r4
	mtspr	SPR_SRR1, %r3
	rfi				/* Switch context */
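	/*
	 * Same trick as on the boot CPU above: SRR0 points 20 bytes (5
	 * instructions) past label 3, so the AP resumes just after the rfi
	 * with translation in AS=1.
	 */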
/*
 * Invalidate initial entry
 */
	mr	%r3, %r29
	bl	tlb1_inval_entry

/*
 * Setup final mapping in TLB1[1] and switch to it
 */
	/* Final kernel mapping, map in 16 MB of RAM */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	li	%r4, 0			/* Entry 0 */

--- 46 unchanged lines hidden ---

 * At this point we're running at virtual addresses KERNBASE and beyond, so
 * we can directly access all locations the kernel was linked against.
 */

/*
 * Invalidate temp mapping
 */
	mr	%r3, %r28
	bl	tlb1_inval_entry

/*
 * Setup a temporary stack
 */
	lis	%r1, tmpstack@ha
	addi	%r1, %r1, tmpstack@l
	addi	%r1, %r1, (TMPSTACKSZ - 8)

--- 35 unchanged lines hidden ---

	isync
	msync

	tlbsync
	msync
	blr

/*
 * expects address to look up in r3, returns entry number in r29
 *
 * FIXME: the hidden assumption is we are now running in AS=0, but we should
 * retrieve actual AS from MSR[IS|DS] and put it in MAS6[SAS]
 */
tlb1_find_current:
	mfspr	%r17, SPR_PID0
	slwi	%r17, %r17, MAS6_SPID0_SHIFT
	mtspr	SPR_MAS6, %r17
	isync
	tlbsx	0, %r3
	mfspr	%r17, SPR_MAS0
	rlwinm	%r29, %r17, 16, 20, 31	/* MAS0[ESEL] -> r29 */
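	/*
	 * tlbsx searched for the address in %r3 using the SPID loaded into
	 * MAS6 above and filled the MAS registers from the matching entry;
	 * the rlwinm extracts the ESEL (entry number) field of MAS0 into
	 * %r29.
	 */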
	/* Make sure we have IPROT set on the entry */
	mfspr	%r17, SPR_MAS1
	oris	%r17, %r17, MAS1_IPROT@h
	mtspr	SPR_MAS1, %r17
	isync
	tlbwe
	isync

--- 16 unchanged lines hidden ---

	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * r29	current entry number
 * r28	returned temp entry
 * r3-r5	scratched
 */
tlb1_temp_mapping_as1:
	/* Read our current translation */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r3, %r29, 16, 12, 15	/* Select our current entry */
	mtspr	SPR_MAS0, %r3
	isync
	tlbre

	/*
	 * Prepare and write temp entry
	 *
	 * FIXME: this is not robust against overflow, i.e. when the current
	 * entry is the last in TLB1
	 */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	addi	%r28, %r29, 1		/* Use next entry. */
	rlwimi	%r3, %r28, 16, 12, 15	/* Select temp entry */
	mtspr	SPR_MAS0, %r3
	isync
	mfspr	%r5, SPR_MAS1
	li	%r4, 1			/* AS=1 */
	rlwimi	%r5, %r4, 12, 19, 19
	li	%r4, 0			/* Global mapping, TID=0 */
	rlwimi	%r5, %r4, 16, 8, 15
	oris	%r5, %r5, (MAS1_VALID | MAS1_IPROT)@h
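	/*
	 * MAS1 now describes a copy of our current translation, but with
	 * TS=1 (AS=1) and TID=0, so the temp entry only takes effect once
	 * the caller's rfi sets MSR[IS|DS].
	 */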
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Loops over TLB1, invalidates all entries, skipping the one which currently
 * maps this code.
 *
 * r29	current entry
 * r3-r5	scratched
 */
tlb1_inval_all_but_current:
	mr	%r6, %r3
	mfspr	%r3, SPR_TLB1CFG	/* Get number of entries */
	andi.	%r3, %r3, TLBCFG_NENTRY_MASK@l
	li	%r4, 0			/* Start from Entry 0 */
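	/*
	 * The loop below reads each TLB1 entry in turn, skips the one we
	 * are currently running from (%r4 == %r29) and clears VALID and
	 * IPROT on all the others.
	 */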
1:	lis	%r5, MAS0_TLBSEL1@h
	rlwimi	%r5, %r4, 16, 12, 15
	mtspr	SPR_MAS0, %r5
	isync
	tlbre
	mfspr	%r5, SPR_MAS1
	cmpw	%r4, %r29		/* our current entry? */
	beq	2f
	rlwinm	%r5, %r5, 0, 2, 31	/* clear VALID and IPROT bits */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
2:	addi	%r4, %r4, 1

--- 265 unchanged lines hidden ---