/*-
 * Copyright (C) 2006 Semihalf, Marian Balakowicz <m8@semihalf.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN
 * NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
 * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/powerpc/booke/locore.S 182198 2008-08-26 17:07:37Z raj $
 */

#include "assym.s"

#include <machine/param.h>
#include <machine/asm.h>
#include <machine/spr.h>
#include <machine/psl.h>
#include <machine/pte.h>
#include <machine/trap.h>
#include <machine/vmparam.h>
#include <machine/tlb.h>
#include <machine/bootinfo.h>

#define TMPSTACKSZ	16384

/*
 * This symbol is here for the benefit of kvm_mkdb, and is supposed to
 * mark the start of kernel text.
 */
	.text
	.globl	kernel_text
kernel_text:

/*
 * Startup entry.  Note, this must be the first thing in the text segment!
 */
	.text
	.globl	__start
__start:

/*
 * Assumptions on the boot loader:
 *  - system memory starts from physical address 0
 *  - kernel is loaded at a 16MB boundary
 *  - it's mapped by a single TLB1 entry
 *  - TLB1 mapping is 1:1 pa to va
 *  - all PID registers are set to the same value
 *
 * Loader register use:
 *	r1	: stack pointer
 *	r3	: metadata pointer
 *
 * We rearrange the TLB1 layout as follows:
 *  - find the AS and entry the kernel started in
 *  - make sure it's protected, invalidate other entries
 *  - create a temp entry in the second AS (make sure it's not TLB1[1])
 *  - switch to the temp mapping
 *  - map 16MB of RAM in TLB1[1]
 *  - use AS=0, set EPN to KERNBASE and RPN to the kernel load address
 *  - switch to the TLB1[1] mapping
 *  - invalidate the temp mapping
 *
 * locore register use:
 *	r1	: stack pointer
 *	r2	: unused
 *	r3	: kernel_text
 *	r4	: _end
 *	r5	: metadata pointer
 *	r6-r9	: unused
 *	r10	: entry we started in
 *	r11	: temp entry
 *	r12	: AS we started in
 *	r13-r31 : auxiliary registers
 */

/*
 * Move metadata ptr to r5
 */
	mr	%r5, %r3

/*
 * Initial cleanup
 */
	li	%r16, 0x200		/* Keep debug exceptions for CodeWarrior. */
	mtmsr	%r16
	isync
#if 0
	mtspr	SPR_HID0, %r16
	isync
	msync
	mtspr	SPR_HID1, %r16
	isync
#endif

	/* Issue INV_ALL Invalidate on TLB0 */
	li	%r16, 0x04
	tlbivax	0, %r16
	isync
	msync

/*
 * Use tlbsx to locate the TLB1 entry that maps kernel code
 */
	bl	1f			/* Current address */
1:	mflr	%r15

	/* Find entry that maps current address */
	mfspr	%r17, SPR_PID0
	slwi	%r17, %r17, MAS6_SPID0_SHIFT
	mtspr	SPR_MAS6, %r17
	isync
	tlbsx	0, %r15

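	/*
	 * After tlbsx, MAS0[ESEL] (the 0x000f0000 field) holds the number
	 * of the matching TLB1 entry; the rlwinm below rotates that field
	 * down into the low nibble of r10.
	 */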
	/* Copy entry number to r10 */
	mfspr	%r17, SPR_MAS0
	rlwinm	%r10, %r17, 16, 28, 31

	/* Invalidate TLB1, skipping our entry. */
	mfspr	%r17, SPR_TLB1CFG	/* Get number of entries */
	andi.	%r17, %r17, TLBCFG_NENTRY_MASK@l
	li	%r16, 0			/* Start from Entry 0 */

2:	lis	%r15, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r15, %r16, 16, 12, 15
	mtspr	SPR_MAS0, %r15
	isync
	tlbre
	mfspr	%r15, SPR_MAS1
	cmpw	%r16, %r10
	beq	3f
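	/*
	 * MAS1[V] and MAS1[IPROT] are the two high-order bits (0x80000000
	 * and 0x40000000); keeping only bits 2-31 below clears exactly
	 * those, leaving the entry invalid and unprotected.
	 */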
	/* Clear VALID and IPROT bits for other entries */
	rlwinm	%r15, %r15, 0, 2, 31
	mtspr	SPR_MAS1, %r15
	isync
	tlbwe
	isync
	msync
3:	addi	%r16, %r16, 1
	cmpw	%r16, %r17		/* Check if this is the last entry */
	bne	2b

/*
 * Create temporary mapping in the other Address Space
 */
	lis	%r17, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r17, %r10, 16, 12, 15	/* Select our entry */
	mtspr	SPR_MAS0, %r17
	isync
	tlbre				/* Read it in */

	/* Prepare and write temp entry */
	lis	%r17, MAS0_TLBSEL1@h	/* Select TLB1 */
	addi	%r11, %r10, 0x1		/* Use next entry. */
	rlwimi	%r17, %r11, 16, 12, 15	/* Select temp entry */
	mtspr	SPR_MAS0, %r17
	isync

	mfspr	%r16, SPR_MAS1
	li	%r15, 1			/* AS 1 */
	rlwimi	%r16, %r15, 12, 19, 19	/* Set MAS1[TS] */
	li	%r17, 0
	rlwimi	%r16, %r17, 0, 8, 15	/* Global mapping, TID=0 */
	mtspr	SPR_MAS1, %r16		/* Write MAS1 after both fields are set */
	isync

	tlbwe
	isync
	msync

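	/*
	 * Switch into the temp AS1 mapping by faking a return from
	 * interrupt: MSR[IS|DS] (the 0x30 bits below) select AS1, and
	 * SRR0 is set 20 bytes past label 4, i.e. five 4-byte
	 * instructions later (mflr, addi, two mtspr and the rfi itself),
	 * so execution resumes at the instruction following rfi, now
	 * translated through the temp entry.
	 */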
	mfmsr	%r16
	ori	%r16, %r16, 0x30	/* Switch to AS 1. */

	bl	4f			/* Find current execution address */
4:	mflr	%r15
	addi	%r15, %r15, 20		/* Increment to instruction after rfi */
	mtspr	SPR_SRR0, %r15
	mtspr	SPR_SRR1, %r16
	rfi				/* Switch context */

/*
 * Invalidate initial entry
 */
	mr	%r22, %r10
	bl	tlb1_inval_entry

/*
 * Setup final mapping in TLB1[1] and switch to it
 */
	/* Final kernel mapping, map in 16 MB of RAM */
	lis	%r16, MAS0_TLBSEL1@h	/* Select TLB1 */
	li	%r17, 1			/* Entry 1 */
	rlwimi	%r16, %r17, 16, 12, 15
	mtspr	SPR_MAS0, %r16
	isync

	li	%r16, (TLB_SIZE_16M << MAS1_TSIZE_SHIFT)@l
	oris	%r16, %r16, (MAS1_VALID | MAS1_IPROT)@h
	mtspr	SPR_MAS1, %r16
	isync

	lis	%r19, KERNBASE@h
	ori	%r19, %r19, KERNBASE@l
	mtspr	SPR_MAS2, %r19		/* Set final EPN, clear WIMG */
	isync

	bl	5f
5:	mflr	%r16			/* Use current address */
	lis	%r18, 0xff00		/* 16MB alignment mask */
	and	%r16, %r16, %r18
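	/*
	 * The and above rounds the current (1:1 mapped) address down to a
	 * 16MB boundary; this yields the physical load address only
	 * because the loader contract above guarantees the kernel is
	 * loaded on a 16MB boundary.
	 */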
	mr	%r25, %r16		/* Copy kernel load address */
	ori	%r16, %r16, (MAS3_SX | MAS3_SW | MAS3_SR)@l
	mtspr	SPR_MAS3, %r16		/* Set RPN and protection */
	isync
	tlbwe
	isync
	msync

	/* Switch to the above TLB1[1] mapping */
	lis	%r18, 0x00ff		/* 16MB offset mask */
	ori	%r18, %r18, 0xffff
	bl	6f
6:	mflr	%r20			/* Use current address */
	and	%r20, %r20, %r18	/* Offset from kernel load address */
	add	%r20, %r20, %r19	/* Move to kernel virtual address */
	addi	%r20, %r20, 32		/* Increment to instr. after rfi */
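	/*
	 * As with the previous switch: 32 = eight 4-byte instructions
	 * from label 6 through the rfi, so SRR0 lands on the code below
	 * that stores the kernel load address, now running at its
	 * KERNBASE-relative virtual address.  SRR1 = 0x200 keeps only
	 * MSR[DE] and clears IS/DS, so translation returns to AS0 via
	 * the new TLB1[1] entry.
	 */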
	li	%r21, 0x200
	mtspr	SPR_SRR0, %r20
	mtspr	SPR_SRR1, %r21
	rfi

	/* Save kernel load address for later use */
	lis	%r24, kernload@ha
	addi	%r24, %r24, kernload@l
	stw	%r25, 0(%r24)

/*
 * Invalidate temp mapping
 */
	mr	%r22, %r11
	bl	tlb1_inval_entry

/*
 * Setup a temporary stack
 */
	lis	%r1, tmpstack@ha
	addi	%r1, %r1, tmpstack@l
	addi	%r1, %r1, (TMPSTACKSZ - 8)
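	/*
	 * The stack grows downward, so r1 starts 8 bytes below the end of
	 * the buffer; presumably this keeps the initial frame inside
	 * tmpstack while preserving 8-byte stack alignment.
	 */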

/*
 * Initialise exception vector offsets
 */
	bl	ivor_setup

/*
 * Jump to system initialization code
 *
 * Setup first two arguments for e500_init, metadata (r5) is already in place.
 */
	lis	%r3, kernel_text@ha
	addi	%r3, %r3, kernel_text@l
	lis	%r4, _end@ha
	addi	%r4, %r4, _end@l

	bl	e500_init

	/* Switch to thread0.td_kstack */
	mr	%r1, %r3
	li	%r3, 0
	stw	%r3, 0(%r1)
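	/*
	 * e500_init returns the new stack pointer (the top of thread0's
	 * kernel stack) in r3; the zero stored at 0(r1) is a null
	 * back-chain word terminating stack traces.
	 */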

	bl	mi_startup  /* Machine independent part, does not return */

/************************************************************************/
/* locore subroutines */
/************************************************************************/

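/*
 * tlb1_inval_entry: invalidate the TLB1 entry whose number is passed
 * in r22 (clobbers r16 and r17).
 */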
tlb1_inval_entry:
	lis	%r17, MAS0_TLBSEL1@h	/* Select TLB1 */
	rlwimi	%r17, %r22, 16, 12, 15	/* Select our entry */
	mtspr	SPR_MAS0, %r17
	isync
	tlbre				/* Read it in */

	li	%r16, 0
	mtspr	SPR_MAS1, %r16
	isync
	tlbwe
	isync
	msync
	blr

ivor_setup:
	/* Set base address of interrupt handler routines */
	lis	%r21, interrupt_vector_base@h
	mtspr	SPR_IVPR, %r21

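	/*
	 * IVPR supplies the high-order 16 bits of each vector address;
	 * the IVORs below hold the low-order offsets, so all handlers
	 * must reside in the same 64KB region as interrupt_vector_base.
	 */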
	/* Assign interrupt handler routines offsets */
	li	%r21, int_critical_input@l
	mtspr	SPR_IVOR0, %r21
	li	%r21, int_machine_check@l
	mtspr	SPR_IVOR1, %r21
	li	%r21, int_data_storage@l
	mtspr	SPR_IVOR2, %r21
	li	%r21, int_instr_storage@l
	mtspr	SPR_IVOR3, %r21
	li	%r21, int_external_input@l
	mtspr	SPR_IVOR4, %r21
	li	%r21, int_alignment@l
	mtspr	SPR_IVOR5, %r21
	li	%r21, int_program@l
	mtspr	SPR_IVOR6, %r21
	li	%r21, int_syscall@l
	mtspr	SPR_IVOR8, %r21
	li	%r21, int_decrementer@l
	mtspr	SPR_IVOR10, %r21
	li	%r21, int_fixed_interval_timer@l
	mtspr	SPR_IVOR11, %r21
	li	%r21, int_watchdog@l
	mtspr	SPR_IVOR12, %r21
	li	%r21, int_data_tlb_error@l
	mtspr	SPR_IVOR13, %r21
	li	%r21, int_inst_tlb_error@l
	mtspr	SPR_IVOR14, %r21
	li	%r21, int_debug@l
	mtspr	SPR_IVOR15, %r21
	blr

/*
 * void tlb1_inval_va(vm_offset_t va)
 *
 * r3 - va to invalidate
 */
ENTRY(tlb1_inval_va)
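	/*
	 * tlbivax encodes its operands in the EA: bits 0xfffff000 give
	 * the page, bit 0x08 selects TLB1 (clear selects TLB0) and bit
	 * 0x04 means invalidate-all, as used in the TLB0 flush at
	 * startup.
	 */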
	/* EA mask */
	lis	%r6, 0xffff
	ori	%r6, %r6, 0xf000
	and	%r3, %r3, %r6

	/* Select TLB1 */
	ori	%r3, %r3, 0x08

	isync
	tlbivax	0, %r3
	isync
	msync
	blr

/*
 * void tlb0_inval_va(vm_offset_t va)
 *
 * r3 - va to invalidate
 */
ENTRY(tlb0_inval_va)
	/* EA mask, this also clears TLBSEL, selecting TLB0 */
	lis	%r6, 0xffff
	ori	%r6, %r6, 0xf000
	and	%r3, %r3, %r6

	isync
	tlbivax	0, %r3
	isync
	msync
	blr

/*
 * Cache disable/enable/inval sequences according
 * to section 2.16 of E500CORE RM.
 */
ENTRY(dcache_inval)
	/* Invalidate d-cache */
	mfspr	%r3, SPR_L1CSR0
	ori	%r3, %r3, (L1CSR0_DCFI | L1CSR0_DCLFR)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(dcache_disable)
	/* Disable d-cache */
	mfspr	%r3, SPR_L1CSR0
	li	%r4, L1CSR0_DCE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(dcache_enable)
	/* Enable d-cache */
	mfspr	%r3, SPR_L1CSR0
	oris	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@h
	ori	%r3, %r3, (L1CSR0_DCPE | L1CSR0_DCE)@l
	msync
	isync
	mtspr	SPR_L1CSR0, %r3
	isync
	blr

ENTRY(icache_inval)
	/* Invalidate i-cache */
	mfspr	%r3, SPR_L1CSR1
	ori	%r3, %r3, (L1CSR1_ICFI | L1CSR1_ICLFR)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

ENTRY(icache_disable)
	/* Disable i-cache */
	mfspr	%r3, SPR_L1CSR1
	li	%r4, L1CSR1_ICE@l
	not	%r4, %r4
	and	%r3, %r3, %r4
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

ENTRY(icache_enable)
	/* Enable i-cache */
	mfspr	%r3, SPR_L1CSR1
	oris	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@h
	ori	%r3, %r3, (L1CSR1_ICPE | L1CSR1_ICE)@l
	isync
	mtspr	SPR_L1CSR1, %r3
	isync
	blr

/*
 * int setfault()
 *
 * Similar to setjmp: sets up for handling faults on accesses to user
 * memory.  Any routine using this may only call bcopy, either the form
 * below or the (currently used) C version, optimized so that it doesn't
 * use any non-volatile registers.
 */
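/*
 * A sketch (hypothetical, not from this file) of how a copyin-style
 * routine might use setfault; the buffer written below is 25 words:
 * LR, r1, r2, then CR, CTR, XER and r13-r31 stored via stmw:
 *
 *	int
 *	copyin_example(const void *udaddr, void *kaddr, size_t len)
 *	{
 *		struct thread *td = curthread;
 *		faultbuf env;
 *
 *		if (setfault(env)) {		// returns twice, like setjmp
 *			td->td_pcb->pcb_onfault = NULL;
 *			return (EFAULT);
 *		}
 *		bcopy(udaddr, kaddr, len);	// a fault unwinds to setfault
 *		td->td_pcb->pcb_onfault = NULL;
 *		return (0);
 *	}
 */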
	.globl	setfault
setfault:
	mflr	%r0
	mfsprg0	%r4
	lwz	%r4, PC_CURTHREAD(%r4)
	lwz	%r4, TD_PCB(%r4)
	stw	%r3, PCB_ONFAULT(%r4)
	mfcr	%r10
	mfctr	%r11
	mfxer	%r12
	stw	%r0, 0(%r3)
	stw	%r1, 4(%r3)
	stw	%r2, 8(%r3)
	stmw	%r10, 12(%r3)		/* store CR, CTR, XER, [r13 .. r31] */
	li	%r3, 0			/* return FALSE */
	blr

/************************************************************************/
/* Data section								*/
/************************************************************************/
	.data
	.align	4
tmpstack:
	.space	TMPSTACKSZ

/*
 * Compiled KERNBASE location
 */
	.globl	kernbase
	.set	kernbase, KERNBASE

/*
 * Globals
 */
#define	INTRCNT_COUNT	256		/* max(HROWPIC_IRQMAX,OPENPIC_IRQMAX) */

GLOBAL(kernload)
	.long	0
GLOBAL(intrnames)
	.space	INTRCNT_COUNT * (MAXCOMLEN + 1) * 2
GLOBAL(eintrnames)
	.align 4
GLOBAL(intrcnt)
	.space	INTRCNT_COUNT * 4 * 2
GLOBAL(eintrcnt)

#include <powerpc/booke/trap_subr.S>