/* locore.S revision 186289 */
/*-
 * Copyright (C) 2007-2008 Semihalf, Rafal Jaworowski <raj@semihalf.com>
 * Copyright (C) 2006 Semihalf, Marian Balakowicz <m8@semihalf.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN
 * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/powerpc/booke/locore.S 186289 2008-12-18 18:28:03Z raj $
 */

#include "assym.s"

#include <machine/asm.h>
#include <machine/param.h>
#include <machine/spr.h>
#include <machine/psl.h>
#include <machine/pte.h>
#include <machine/trap.h>
#include <machine/vmparam.h>
#include <machine/tlb.h>
#include <machine/bootinfo.h>

/* Size of the temporary boot stack reserved in the data section below. */
#define TMPSTACKSZ	16384

43184319Smarcel	.text
44184319Smarcel	.globl	btext
45184319Smarcelbtext:
46184319Smarcel
47176771Sraj/*
48176771Sraj * This symbol is here for the benefit of kvm_mkdb, and is supposed to
49176771Sraj * mark the start of kernel text.
50176771Sraj */
51176771Sraj	.globl	kernel_text
52176771Srajkernel_text:
53176771Sraj
54176771Sraj/*
55176771Sraj * Startup entry.  Note, this must be the first thing in the text segment!
56176771Sraj */
57176771Sraj	.text
58176771Sraj	.globl	__start
59176771Sraj__start:
60176771Sraj
61176771Sraj/*
62186229Sraj * Assumptions on the boot loader:
63176771Sraj *  - system memory starts from physical address 0
64176771Sraj *  - it's mapped by a single TBL1 entry
65176771Sraj *  - TLB1 mapping is 1:1 pa to va
66186229Sraj *  - kernel is loaded at 16MB boundary
67176771Sraj *  - all PID registers are set to the same value
68186229Sraj *  - CPU is running in AS=0
69176771Sraj *
70186229Sraj * Registers contents provided by the loader(8):
71176771Sraj *	r1	: stack pointer
72176771Sraj *	r3	: metadata pointer
73176771Sraj *
74176771Sraj * We rearrange the TLB1 layout as follows:
75186229Sraj *  - find TLB1 entry we started in
76176771Sraj *  - make sure it's protected, ivalidate other entries
77186229Sraj *  - create temp entry in the second AS (make sure it's not TLB[1])
78176771Sraj *  - switch to temp mapping
79186229Sraj *  - map 16MB of RAM in TLB1[1]
80176771Sraj *  - use AS=1, set EPN to KERNBASE and RPN to kernel load address
81186229Sraj *  - switch to to TLB1[1] mapping
82176771Sraj *  - invalidate temp mapping
83176771Sraj *
84186229Sraj * locore registers use:
85176771Sraj *	r1	: stack pointer
86186229Sraj *	r2	: trace pointer (AP only, for early diagnostics)
87186229Sraj *	r3-r27	: scratch registers
88186229Sraj *	r28	: kernload
89186229Sraj *	r29	: temp TLB1 entry
90186229Sraj *	r30	: initial TLB1 entry we started in
91186229Sraj *	r31	: metadata pointer
92176771Sraj */
93176771Sraj
94176771Sraj/*
95186229Sraj * Keep metadata ptr in r31 for later use.
96176771Sraj */
97186229Sraj	mr	%r31, %r3
98176771Sraj
99176771Sraj/*
100176771Sraj * Initial cleanup
101176771Sraj */
102186229Sraj	li	%r3, PSL_DE	/* Keep debug exceptions for CodeWarrior. */
103186229Sraj	mtmsr	%r3
104176771Sraj	isync
105176771Sraj
106186229Sraj	/* Invalidate all entries in TLB0 */
107186229Sraj	li	%r3, 0
108186229Sraj	bl	tlb_inval_all
109176771Sraj
110176771Sraj/*
111186229Sraj * Locate the TLB1 entry that maps this code
112176771Sraj */
113186229Sraj	bl	1f
114186229Sraj1:	mflr	%r3
115186229Sraj	bl	tlb1_find_current	/* the entry number found is returned in r30 */
116176771Sraj
117186229Sraj	bl	tlb1_inval_all_but_current
118176771Sraj/*
119186229Sraj * Create temporary mapping in AS=1 and switch to it
120176771Sraj */
121186229Sraj	bl	tlb1_temp_mapping_as1
122176771Sraj
123186229Sraj	mfmsr	%r3
124186229Sraj	ori	%r3, %r3, (PSL_IS | PSL_DS)
125186229Sraj	bl	2f
126186229Sraj2:	mflr	%r4
127186229Sraj	addi	%r4, %r4, 20
128186229Sraj	mtspr	SPR_SRR0, %r4
129186229Sraj	mtspr	SPR_SRR1, %r3
130176771Sraj	rfi				/* Switch context */
131176771Sraj
132176771Sraj/*
133176771Sraj * Invalidate initial entry
134176771Sraj */
135186229Sraj	mr	%r3, %r30
136176771Sraj	bl	tlb1_inval_entry
137176771Sraj
138176771Sraj/*
139176771Sraj * Setup final mapping in TLB1[1] and switch to it
140176771Sraj */
141176771Sraj	/* Final kernel mapping, map in 16 MB of RAM */
142186229Sraj	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
143186229Sraj	li	%r4, 1			/* Entry 1 */
144186229Sraj	rlwimi	%r3, %r4, 16, 12, 15
145186229Sraj	mtspr	SPR_MAS0, %r3
146176771Sraj	isync
147176771Sraj
148186229Sraj	li	%r3, (TLB_SIZE_16M << MAS1_TSIZE_SHIFT)@l
149186229Sraj	oris	%r3, %r3, (MAS1_VALID | MAS1_IPROT)@h
150186229Sraj	mtspr	SPR_MAS1, %r3		/* note TS was not filled, so it's TS=0 */
151176771Sraj	isync
152176771Sraj
153186229Sraj	lis	%r3, KERNBASE@h
154186229Sraj	ori	%r3, %r3, KERNBASE@l	/* EPN = KERNBASE */
155186229Sraj	mtspr	SPR_MAS2, %r3
156176771Sraj	isync
157176771Sraj
158186229Sraj	/* Discover phys load address */
159186229Sraj	bl	3f
160186229Sraj3:	mflr	%r4			/* Use current address */
161186229Sraj	rlwinm	%r4, %r4, 0, 0, 7	/* 16MB alignment mask */
162186229Sraj	mr	%r28, %r4		/* Keep kernel load address */
163186229Sraj	ori	%r4, %r4, (MAS3_SX | MAS3_SW | MAS3_SR)@l
164186229Sraj	mtspr	SPR_MAS3, %r4		/* Set RPN and protection */
165176771Sraj	isync
166176771Sraj	tlbwe
167176771Sraj	isync
168176771Sraj	msync
169176771Sraj
170176771Sraj	/* Switch to the above TLB1[1] mapping */
171186229Sraj	bl	4f
172186229Sraj4:	mflr	%r4
173186229Sraj	rlwinm	%r4, %r4, 0, 8, 31	/* Current offset from kernel load address */
174186229Sraj	rlwinm	%r3, %r3, 0, 0, 19
175186229Sraj	add	%r4, %r4, %r3		/* Convert to kernel virtual address */
176186229Sraj	addi	%r4, %r4, 36
177186229Sraj	li	%r3, PSL_DE		/* Note AS=0 */
178186229Sraj	mtspr   SPR_SRR0, %r4
179186229Sraj	mtspr   SPR_SRR1, %r3
180176771Sraj	rfi
181176771Sraj
182176771Sraj/*
183176771Sraj * Invalidate temp mapping
184176771Sraj */
185186229Sraj	mr	%r3, %r29
186176771Sraj	bl	tlb1_inval_entry
187176771Sraj
188176771Sraj/*
189186229Sraj * Save kernel load address for later use.
190186229Sraj */
191186229Sraj	lis	%r3, kernload@ha
192186229Sraj	addi	%r3, %r3, kernload@l
193186229Sraj	stw	%r28, 0(%r3)
194186229Sraj
195186229Sraj/*
196176771Sraj * Setup a temporary stack
197176771Sraj */
198182198Sraj	lis	%r1, tmpstack@ha
199182198Sraj	addi	%r1, %r1, tmpstack@l
200182198Sraj	addi	%r1, %r1, (TMPSTACKSZ - 8)
201176771Sraj
202176771Sraj/*
203186289Sraj * Initialise exception vector offsets
204176771Sraj */
205176771Sraj	bl	ivor_setup
206176771Sraj
207176771Sraj/*
208186229Sraj * Set up arguments and jump to system initialization code
209176771Sraj */
210176771Sraj	lis	%r3, kernel_text@ha
211176771Sraj	addi	%r3, %r3, kernel_text@l
212176771Sraj	lis	%r4, _end@ha
213176771Sraj	addi	%r4, %r4, _end@l
214186229Sraj	mr	%r5, %r31		/* metadata ptr */
215176771Sraj
216186229Sraj	/* Prepare e500 core */
217182198Sraj	bl	e500_init
218182198Sraj
219186229Sraj	/* Switch to thread0.td_kstack now */
220182198Sraj	mr	%r1, %r3
221182198Sraj	li	%r3, 0
222182198Sraj	stw	%r3, 0(%r1)
223182198Sraj
224186229Sraj	/* Machine independet part, does not return */
225186229Sraj	bl	mi_startup
226186229Sraj	/* NOT REACHED */
227186229Sraj5:	b	5b
228176771Sraj
/*
 * Invalidate all entries in the given TLB.
 *
 * r3	TLBSEL
 */
tlb_inval_all:
	rlwinm	%r3, %r3, 3, 0x18	/* TLBSEL */
	ori	%r3, %r3, 0x4		/* INVALL */
	tlbivax	0, %r3
	isync
	msync

	tlbsync
	msync
	blr

/*
 * Expects the address to look up in r3; returns the TLB1 entry number that
 * maps it in r30.  Also sets IPROT on the found entry so it survives later
 * invalidations.
 *
 * FIXME: the hidden assumption is we are now running in AS=0, but we should
 * retrieve actual AS from MSR[IS|DS] and put it in MAS6[SAS]
 */
tlb1_find_current:
	mfspr	%r17, SPR_PID0
	slwi	%r17, %r17, MAS6_SPID0_SHIFT
	mtspr	SPR_MAS6, %r17
	isync
	tlbsx	0, %r3
	mfspr	%r17, SPR_MAS0
	rlwinm	%r30, %r17, 16, 20, 31		/* MAS0[ESEL] -> r30 */

	/* Make sure we have IPROT set on the entry */
	mfspr	%r17, SPR_MAS1
	oris	%r17, %r17, MAS1_IPROT@h
	mtspr	SPR_MAS1, %r17
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Invalidates a single entry in TLB1.
 *
 * r3		ESEL
 * r4-r5	scratched
 */
tlb1_inval_entry:
	lis	%r4, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r4, %r3, 16, 12, 15	/* Select our entry */
	mtspr	SPR_MAS0, %r4
	isync
	tlbre
	li	%r5, 0			/* MAS1[V] = 0 */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Create a temporary AS=1 mapping in TLB1 that duplicates the current
 * translation, so the original entry can be safely replaced.
 *
 * r30		current entry number
 * r29		returned temp entry
 * r3-r5	scratched
 */
tlb1_temp_mapping_as1:
	/* Read our current translation */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r3, %r30, 16, 12, 15	/* Select our current entry */
	mtspr	SPR_MAS0, %r3
	isync
	tlbre

	/*
	 * Prepare and write temp entry
	 *
	 * FIXME this is not robust against overflow i.e. when the current
	 * entry is the last in TLB1
	 */
	lis	%r3, MAS0_TLBSEL1@h	/* Select TLB1 */
	addi	%r29, %r30, 1		/* Use next entry. */
	li	%r4, 1
	cmpw	%r4, %r29		/* Avoid TLB1[1], reserved for the kernel */
	bne	1f
	addi	%r29, %r29, 1
1:	rlwimi	%r3, %r29, 16, 12, 15	/* Select temp entry */
	mtspr	SPR_MAS0, %r3
	isync
	mfspr	%r5, SPR_MAS1
	li	%r4, 1			/* AS=1 */
	rlwimi	%r5, %r4, 12, 19, 19
	li	%r4, 0			/* Global mapping, TID=0 */
	rlwimi	%r5, %r4, 16, 8, 15
	oris	%r5, %r5, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
	blr

/*
 * Loops over TLB1, invalidates all entries skipping the one which currently
 * maps this code.
 *
 * r30		current entry
 * r3-r6	scratched
 */
tlb1_inval_all_but_current:
	mr	%r6, %r3		/* NOTE(review): r6 appears unused below — confirm */
	mfspr	%r3, SPR_TLB1CFG	/* Get number of entries */
	andi.	%r3, %r3, TLBCFG_NENTRY_MASK@l
	li	%r4, 0			/* Start from Entry 0 */
1:	lis	%r5, MAS0_TLBSEL1@h
	rlwimi	%r5, %r4, 16, 12, 15
	mtspr	SPR_MAS0, %r5
	isync
	tlbre
	mfspr	%r5, SPR_MAS1
	cmpw	%r4, %r30		/* our current entry? */
	beq	2f
	rlwinm	%r5, %r5, 0, 2, 31	/* clear VALID and IPROT bits */
	mtspr	SPR_MAS1, %r5
	isync
	tlbwe
	isync
	msync
2:	addi	%r4, %r4, 1
	cmpw	%r4, %r3		/* Check if this is the last entry */
	bne	1b
	blr

/************************************************************************/
/* locore subroutines */
/************************************************************************/

/*
 * Program IVPR and the IVORn registers with the offsets of the interrupt
 * handlers provided by trap_subr.S.  IVOR7 and IVOR9 are not assigned here.
 */
ivor_setup:
	/* Set base address of interrupt handler routines */
	lis	%r3, interrupt_vector_base@h
	mtspr	SPR_IVPR, %r3

	/* Assign interrupt handler routines offsets */
	li	%r3, int_critical_input@l
	mtspr	SPR_IVOR0, %r3
	li	%r3, int_machine_check@l
	mtspr	SPR_IVOR1, %r3
	li	%r3, int_data_storage@l
	mtspr	SPR_IVOR2, %r3
	li	%r3, int_instr_storage@l
	mtspr	SPR_IVOR3, %r3
	li	%r3, int_external_input@l
	mtspr	SPR_IVOR4, %r3
	li	%r3, int_alignment@l
	mtspr	SPR_IVOR5, %r3
	li	%r3, int_program@l
	mtspr	SPR_IVOR6, %r3
	li	%r3, int_syscall@l
	mtspr	SPR_IVOR8, %r3
	li	%r3, int_decrementer@l
	mtspr	SPR_IVOR10, %r3
	li	%r3, int_fixed_interval_timer@l
	mtspr	SPR_IVOR11, %r3
	li	%r3, int_watchdog@l
	mtspr	SPR_IVOR12, %r3
	li	%r3, int_data_tlb_error@l
	mtspr	SPR_IVOR13, %r3
	li	%r3, int_inst_tlb_error@l
	mtspr	SPR_IVOR14, %r3
	li	%r3, int_debug@l
	mtspr	SPR_IVOR15, %r3
	blr

/*
 * Cache disable/enable/inval sequences according
 * to section 2.16 of E500CORE RM.
 */
ENTRY(dcache_inval)
	/* Invalidate d-cache */
	mfspr	%r3, SPR_L1CSR0
	ori	%r3, %r3, (L1CSR0_DCFI | L1CSR0_DCLFR)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR0		/* Wait for hardware to clear DCFI */
	andi.	%r3, %r3, L1CSR0_DCFI
	bne	1b
	blr

ENTRY(dcache_disable)
	/* Disable d-cache */
	mfspr	%r3, SPR_L1CSR0
	li	%r4, L1CSR0_DCE@l
	not	%r4, %r4
	and	%r3, %r3, %r4		/* Clear L1CSR0[DCE] */
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(dcache_enable)
	/* Enable d-cache */
	mfspr	%r3, SPR_L1CSR0
	oris	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@h
	ori	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(icache_inval)
	/* Invalidate i-cache */
	mfspr	%r3, SPR_L1CSR1
	ori	%r3, %r3, (L1CSR1_ICFI | L1CSR1_ICLFR)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
1:	mfspr	%r3, SPR_L1CSR1		/* Wait for hardware to clear ICFI */
	andi.	%r3, %r3, L1CSR1_ICFI
	bne	1b
	blr

ENTRY(icache_disable)
	/* Disable i-cache */
	mfspr	%r3, SPR_L1CSR1
	li	%r4, L1CSR1_ICE@l
	not	%r4, %r4
	and	%r3, %r3, %r4		/* Clear L1CSR1[ICE] */
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

ENTRY(icache_enable)
	/* Enable i-cache */
	mfspr	%r3, SPR_L1CSR1
	oris	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@h
	ori	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

/*
 * int setfault()
 *
 * Similar to setjmp to setup for handling faults on accesses to user memory.
 * Any routine using this may only call bcopy, either the form below,
 * or the (currently used) C code optimized, so it doesn't use any non-volatile
 * registers.
 */
	.globl	setfault
setfault:
	mflr	%r0
	mfsprg0	%r4
	lwz	%r4, PC_CURTHREAD(%r4)
	lwz	%r4, TD_PCB(%r4)
	stw	%r3, PCB_ONFAULT(%r4)	/* Record the fault buffer in the PCB */
	mfcr	%r10
	mfctr	%r11
	mfxer	%r12
	stw	%r0, 0(%r3)		/* Save LR */
	stw	%r1, 4(%r3)		/* Save SP */
	stw	%r2, 8(%r3)
	stmw	%r10, 12(%r3)		/* store CR, CTR, XER, [r13 .. r31] */
	li	%r3, 0			/* return FALSE */
	blr

/************************************************************************/
/* Data section								*/
/************************************************************************/
	.data
	.align	4
tmpstack:
	.space	TMPSTACKSZ

/*
 * Compiled KERNBASE locations
 */
	.globl	kernbase
	.set	kernbase, KERNBASE

/*
 * Globals
 */
#define	INTRCNT_COUNT	256		/* max(HROWPIC_IRQMAX,OPENPIC_IRQMAX) */

GLOBAL(kernload)
	.long	0
GLOBAL(intrnames)
	.space	INTRCNT_COUNT * (MAXCOMLEN + 1) * 2
GLOBAL(eintrnames)
	.align 4
GLOBAL(intrcnt)
	.space	INTRCNT_COUNT * 4 * 2
GLOBAL(eintrcnt)

#include <powerpc/booke/trap_subr.S>
530