Deleted (locore.S 222391) vs. Added (locore.S 222400)
1/*-
2 * Copyright (C) 2007-2009 Semihalf, Rafal Jaworowski <raj@semihalf.com>
3 * Copyright (C) 2006 Semihalf, Marian Balakowicz <m8@semihalf.com>
4 * All rights reserved.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:

--- 9 unchanged lines hidden (view full) ---

18 * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
19 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
20 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
21 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
22 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
23 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
24 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 *
26 * $FreeBSD: head/sys/powerpc/booke/locore.S 222391 2011-05-27 23:09:12Z marcel $
26 * $FreeBSD: head/sys/powerpc/booke/locore.S 222400 2011-05-28 04:10:44Z marcel $
27 */
28
29#include "assym.s"
30
31#include <machine/asm.h>
32#include <machine/hid.h>
33#include <machine/param.h>
34#include <machine/spr.h>

--- 43 unchanged lines hidden (view full) ---

78 * - map 16MB of RAM in TLB1[1]
79 * - use AS=1, set EPN to KERNBASE and RPN to kernel load address
 80 * - switch to TLB1[1] mapping
81 * - invalidate temp mapping
82 *
 83 * locore register use:
84 * r1 : stack pointer
85 * r2 : trace pointer (AP only, for early diagnostics)
86 * r3-r27 : scratch registers
87 * r28 : kernload
88 * r29 : temp TLB1 entry
89 * r30 : initial TLB1 entry we started in
90 * r31 : metadata pointer
86 * r3-r26 : scratch registers
87 * r27 : kernload
88 * r28 : temp TLB1 entry
89 * r29 : initial TLB1 entry we started in
90 * r30-r31 : arguments (metadata pointer)
91 */
92
93/*
94 * Keep metadata ptr in r31 for later use.
94 * Keep arguments in r30 & r31 for later use.
95 */
96 mr %r31, %r3
96 mr %r30, %r3
97 mr %r31, %r4
97
98/*
99 * Initial cleanup
100 */
101 li %r3, PSL_DE /* Keep debug exceptions for CodeWarrior. */
102 mtmsr %r3
103 isync
104

--- 10 unchanged lines hidden (view full) ---

115 li %r3, 0
116 bl tlb_inval_all
117
118/*
119 * Locate the TLB1 entry that maps this code
120 */
121 bl 1f
122 1:	mflr %r3
123 bl tlb1_find_current /* the entry number found is returned in r30 */
124 bl tlb1_find_current /* the entry found is returned in r29 */
124
125 bl tlb1_inval_all_but_current
126/*
127 * Create temporary mapping in AS=1 and switch to it
128 */
129 bl tlb1_temp_mapping_as1
130
131 mfmsr %r3
132 ori %r3, %r3, (PSL_IS | PSL_DS)
133 bl 2f
134 2:	mflr %r4
135 addi %r4, %r4, 20
136 mtspr SPR_SRR0, %r4
137 mtspr SPR_SRR1, %r3
138 rfi /* Switch context */
139
140/*
141 * Invalidate initial entry
142 */
143 mr %r3, %r30
144 mr %r3, %r29
144 bl tlb1_inval_entry
145
146/*
147 * Setup final mapping in TLB1[1] and switch to it
148 */
149 /* Final kernel mapping, map in 16 MB of RAM */
150 lis %r3, MAS0_TLBSEL1@h /* Select TLB1 */
151 li %r4, 0 /* Entry 0 */

--- 13 unchanged lines hidden (view full) ---

165#endif
166 mtspr SPR_MAS2, %r3
167 isync
168
169 /* Discover phys load address */
170 bl 3f
171 3:	mflr %r4			/* Use current address */
172 rlwinm %r4, %r4, 0, 0, 7 /* 16MB alignment mask */
173 mr %r28, %r4 /* Keep kernel load address */
174 mr %r27, %r4 /* Keep kernel load address */
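The rlwinm/mr pair above derives the physical load address: keeping only bits 0-7 rounds the current instruction address down to a 16 MB boundary, and the result is parked in r27 (r28 on the deleted side) as kernload. A minimal C sketch of that computation, purely illustrative (round_to_16mb is a hypothetical helper, not something in this file):

    #include <stdint.h>

    /*
     * Illustrative only: rlwinm rX, rX, 0, 0, 7 keeps the top 8 bits of a
     * 32-bit address, i.e. rounds it down to a 16 MB boundary.
     */
    static uint32_t
    round_to_16mb(uint32_t addr)
    {
            return (addr & 0xff000000u);
    }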
174 ori %r4, %r4, (MAS3_SX | MAS3_SW | MAS3_SR)@l
175 mtspr SPR_MAS3, %r4 /* Set RPN and protection */
176 isync
177 tlbwe
178 isync
179 msync
180
181 /* Switch to the above TLB1[1] mapping */

--- 6 unchanged lines hidden (view full) ---

188 li %r3, PSL_DE /* Note AS=0 */
189 mtspr SPR_SRR0, %r4
190 mtspr SPR_SRR1, %r3
191 rfi
192
193/*
194 * Invalidate temp mapping
195 */
196 mr %r3, %r29
197 mr %r3, %r28
197 bl tlb1_inval_entry
198
199/*
200 * Save kernel load address for later use.
201 */
202 lis %r3, kernload@ha
203 addi %r3, %r3, kernload@l
204 stw %r28, 0(%r3)
205 stw %r27, 0(%r3)
205#ifdef SMP
206 /*
207 * APs need a separate copy of kernload info within the __boot_page
208 * area so they can access this value very early, before their TLBs
209 * are fully set up and the kernload global location is available.
210 */
211 lis %r3, kernload_ap@ha
212 addi %r3, %r3, kernload_ap@l
213 stw %r28, 0(%r3)
214 stw %r27, 0(%r3)
214 msync
215#endif
216
217/*
218 * Setup a temporary stack
219 */
220 lis %r1, tmpstack@ha
221 addi %r1, %r1, tmpstack@l
222 addi %r1, %r1, (TMPSTACKSZ - 8)
223
224/*
225 * Initialise exception vector offsets
226 */
227 bl ivor_setup
228
229/*
230 * Set up arguments and jump to system initialization code
231 */
232 lis %r3, kernel_text@ha
233 addi %r3, %r3, kernel_text@l
234 lis %r4, _end@ha
235 addi %r4, %r4, _end@l
236 mr %r5, %r31 /* metadata ptr */
233 mr %r3, %r30
234 mr %r4, %r31
237
238 /* Prepare e500 core */
239 bl e500_init
237 bl booke_init
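The added calling sequence forwards the two values preserved from the loader in r30/r31 as the first and second arguments, and the value returned by booke_init becomes the initial kernel stack pointer (it is moved into r1 a few lines below). A hedged sketch of the C prototype this implies; the real declaration lives in the Book-E machdep code and the parameter names here are assumptions:

    #include <stdint.h>

    /*
     * Assumed shape of the C entry point: two register arguments in (the
     * values the loader left in r3/r4 at kernel entry); the returned
     * pointer is used as the initial stack, per the "mr %r1, %r3" that
     * follows the call.
     */
    void *booke_init(uint32_t arg1, uint32_t arg2);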
240
241 /* Switch to thread0.td_kstack now */
242 mr %r1, %r3
243 li %r3, 0
244 stw %r3, 0(%r1)
245
246 	/* Machine independent part, does not return */
247 bl mi_startup

--- 37 unchanged lines hidden (view full) ---

285 li %r3, 0
286 bl tlb_inval_all
287
288/*
289 * Find TLB1 entry which is translating us now
290 */
291 bl 2f
292 2:	mflr %r3
293 bl tlb1_find_current /* the entry number found is in r30 */
291 bl tlb1_find_current /* the entry number found is in r29 */
294
295 bl tlb1_inval_all_but_current
296/*
297 * Create temporary translation in AS=1 and switch to it
298 */
299 bl tlb1_temp_mapping_as1
300
301 mfmsr %r3
302 ori %r3, %r3, (PSL_IS | PSL_DS)
303 bl 3f
304 3:	mflr %r4
305 addi %r4, %r4, 20
306 mtspr SPR_SRR0, %r4
307 mtspr SPR_SRR1, %r3
308 rfi /* Switch context */
309
310/*
311 * Invalidate initial entry
312 */
313 mr %r3, %r30
311 mr %r3, %r29
314 bl tlb1_inval_entry
315
316/*
317 * Setup final mapping in TLB1[1] and switch to it
318 */
319 /* Final kernel mapping, map in 16 MB of RAM */
320 lis %r3, MAS0_TLBSEL1@h /* Select TLB1 */
321 li %r4, 0 /* Entry 0 */

--- 46 unchanged lines hidden (view full) ---

368 * At this point we're running at virtual addresses KERNBASE and beyond so
369 * it's allowed to directly access all locations the kernel was linked
370 * against.
371 */
372
373/*
374 * Invalidate temp mapping
375 */
376 mr %r3, %r29
374 mr %r3, %r28
377 bl tlb1_inval_entry
378
379/*
380 * Setup a temporary stack
381 */
382 lis %r1, tmpstack@ha
383 addi %r1, %r1, tmpstack@l
384 addi %r1, %r1, (TMPSTACKSZ - 8)

--- 35 unchanged lines hidden (view full) ---

420 isync
421 msync
422
423 tlbsync
424 msync
425 blr
426
427/*
428 * expects address to look up in r3, returns entry number in r30
426 * expects address to look up in r3, returns entry number in r29
429 *
430 * FIXME: the hidden assumption is we are now running in AS=0, but we should
431 * retrieve actual AS from MSR[IS|DS] and put it in MAS6[SAS]
432 */
433 tlb1_find_current:
434 mfspr %r17, SPR_PID0
435 slwi %r17, %r17, MAS6_SPID0_SHIFT
436 mtspr SPR_MAS6, %r17
437 isync
438 tlbsx 0, %r3
439 mfspr %r17, SPR_MAS0
440 rlwinm %r30, %r17, 16, 20, 31 /* MAS0[ESEL] -> r30 */
438 rlwinm %r29, %r17, 16, 20, 31 /* MAS0[ESEL] -> r29 */
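The rlwinm above unpacks the entry number reported by tlbsx: MAS0 is rotated left by 16 bits and masked to its low 12 bits, i.e. a shift-and-mask of the ESEL field. A small C sketch of the same extraction, written against the field placement this code assumes (mas0_esel is illustrative, not a kernel macro):

    #include <stdint.h>

    /* Illustrative: rlwinm rD, rS, 16, 20, 31 is (mas0 >> 16) & 0x0fff. */
    static uint32_t
    mas0_esel(uint32_t mas0)
    {
            return ((mas0 >> 16) & 0x0fffu);
    }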
441
442 /* Make sure we have IPROT set on the entry */
443 mfspr %r17, SPR_MAS1
444 oris %r17, %r17, MAS1_IPROT@h
445 mtspr SPR_MAS1, %r17
446 isync
447 tlbwe
448 isync

--- 16 unchanged lines hidden (view full) ---

465 mtspr SPR_MAS1, %r5
466 isync
467 tlbwe
468 isync
469 msync
470 blr
471
472/*
473 * r30 current entry number
474 * r29 returned temp entry
471 * r29 current entry number
472 * r28 returned temp entry
475 * r3-r5 scratched
476 */
477 tlb1_temp_mapping_as1:
478 /* Read our current translation */
479 lis %r3, MAS0_TLBSEL1@h /* Select TLB1 */
480 rlwimi %r3, %r30, 16, 12, 15 /* Select our current entry */
478 rlwimi %r3, %r29, 16, 12, 15 /* Select our current entry */
481 mtspr SPR_MAS0, %r3
482 isync
483 tlbre
484
485 /*
486 * Prepare and write temp entry
487 *
488 * FIXME this is not robust against overflow i.e. when the current
489 * entry is the last in TLB1
490 */
491 lis %r3, MAS0_TLBSEL1@h /* Select TLB1 */
492 addi %r29, %r30, 1 /* Use next entry. */
493 rlwimi %r3, %r29, 16, 12, 15 /* Select temp entry */
490 addi %r28, %r29, 1 /* Use next entry. */
491 rlwimi %r3, %r28, 16, 12, 15 /* Select temp entry */
494 mtspr SPR_MAS0, %r3
495 isync
496 mfspr %r5, SPR_MAS1
497 li %r4, 1 /* AS=1 */
498 rlwimi %r5, %r4, 12, 19, 19
499 li %r4, 0 /* Global mapping, TID=0 */
500 rlwimi %r5, %r4, 16, 8, 15
501 oris %r5, %r5, (MAS1_VALID | MAS1_IPROT)@h
502 mtspr SPR_MAS1, %r5
503 isync
504 tlbwe
505 isync
506 msync
507 blr
508
509/*
510 * Loops over TLB1, invalidates all entries skipping the one which currently
511 * maps this code.
512 *
513 * r30 current entry
511 * r29 current entry
514 * r3-r5 scratched
515 */
516 tlb1_inval_all_but_current:
517 mr %r6, %r3
518 mfspr %r3, SPR_TLB1CFG /* Get number of entries */
519 andi. %r3, %r3, TLBCFG_NENTRY_MASK@l
520 li %r4, 0 /* Start from Entry 0 */
521 1:	lis %r5, MAS0_TLBSEL1@h
522 rlwimi %r5, %r4, 16, 12, 15
523 mtspr SPR_MAS0, %r5
524 isync
525 tlbre
526 mfspr %r5, SPR_MAS1
527 cmpw %r4, %r30 /* our current entry? */
525 cmpw %r4, %r29 /* our current entry? */
528 beq 2f
529 rlwinm %r5, %r5, 0, 2, 31 /* clear VALID and IPROT bits */
530 mtspr SPR_MAS1, %r5
531 isync
532 tlbwe
533 isync
534 msync
535 2:	addi %r4, %r4, 1

--- 265 unchanged lines hidden ---