/*	$NetBSD: rtld_start.S,v 1.10 2009/12/14 00:41:19 matt Exp $	*/

/*
 * Copyright 1997 Michael L. Hitch <mhitch@montana.edu>
 * Portions copyright 2002 Charles M. Hannum <root@ihack.net>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#include <machine/asm.h>

.globl _C_LABEL(_rtld_relocate_nonplt_self)
.globl _C_LABEL(_rtld)

#define	PTR_SIZE	(1<<PTR_SCALESHIFT)

/*
 *      a0      stack pointer
 *      a1      rtld cleanup (filled in by dynamic loader)
 *      a2      rtld object (filled in by dynamic loader)
 *      a3      ps_strings
 */
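/*
 * rtld_start: entry point of the dynamic linker itself.  It computes its
 * own load offset, relocates itself with _rtld_relocate_nonplt_self(),
 * calls _rtld() to link the main program, and finally tail-jumps to the
 * program entry point returned in v0, passing the cleanup function, the
 * main Obj_Entry and ps_strings.
 */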
LEAF(rtld_start)
	.frame	sp, 4*PTR_SIZE, ra
	.mask	0x10090000,-PTR_SIZE
	.set	noreorder
	SETUP_GP
	PTR_SUBU sp, 4*PTR_SIZE		/* adjust stack pointer */
	SETUP_GP64(s4, rtld_start)
	SAVE_GP(0)
					/* -> 2*PTR_SIZE(sp) for atexit */
					/* -> 3*PTR_SIZE(sp) for obj_main */
	move	s0, a0			/* save stack pointer from a0 */
	move	s3, a3			/* save ps_strings pointer */

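	/*
	 * Determine our own load offset: "bal 1f" yields the run-time
	 * address of label 1 in ra, while PTR_LA (through the not yet
	 * relocated GOT) yields its link-time address, so the difference
	 * is the relocation base passed to _rtld_relocate_nonplt_self().
	 */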
	PTR_LA	a1, 1f
	bal	1f
	PTR_LA	t0, _C_LABEL(_rtld_relocate_nonplt_self)
1:	PTR_SUBU a1, ra, a1		/* relocbase */
	PTR_LA	a0, _DYNAMIC
	PTR_ADDU t9, a1, t0
	jalr	t9			/* _rtld_relocate_nonplt_self(dynp, relocbase) */
	PTR_ADDU a0, a1, a0		/* &_DYNAMIC */

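	/*
	 * Call _rtld() with the kernel-supplied stack pointer and the
	 * addresses of two stack slots that _rtld() fills in with the
	 * atexit cleanup function and the main object's Obj_Entry; it
	 * returns the program entry point in v0.
	 */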
	move	a0, s0			/* sp */
	PTR_ADDU a1, sp, 2*PTR_SIZE	/* &our atexit function */
	PTR_ADDU a2, sp, 3*PTR_SIZE	/* obj_main entry */
	PTR_SUBU sp, 4*SZREG		/* ABI requires us to reserve space for 4 register args */
	PTR_LA	t9, _C_LABEL(_rtld)
	jalr	t9			/* v0 = _rtld(sp, cleanup, objp) */
	nop
	PTR_ADDU sp, 4*SZREG

	PTR_L	a1, 2*PTR_SIZE(sp)	/* our atexit function */
	PTR_L	a2, 3*PTR_SIZE(sp)	/* obj_main entry */
	PTR_ADDU sp, 4*PTR_SIZE		/* readjust stack */
	move	a0, s0			/* stack pointer */
	move	t9, v0
	PTR_SUBU sp, 4*SZREG		/* ABI requires us to reserve space for 4 register args */
	move	ra, t9			/* RA == PC signals backtrace routine to stop */
	j	t9			/* _start(sp, cleanup, obj); */
	move	a3, s3			/* restore ps_strings */
END(rtld_start)

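/*
 * Stack frame used by _rtld_bind_start to preserve the argument
 * registers, gp, s0 and the caller's return address (found in $15)
 * across the call into the run-time resolver.
 */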
#define	XCALLFRAME_SIZ		(12*SZREG)
#define	XCALLFRAME_RA		(10*SZREG)
#define	XCALLFRAME_GP		(9*SZREG)
#define	XCALLFRAME_S0		(8*SZREG)
#define	XCALLFRAME_A3		(7*SZREG)
#define	XCALLFRAME_A2		(6*SZREG)
#define	XCALLFRAME_A1		(5*SZREG)
#define	XCALLFRAME_A0		(4*SZREG)
#if defined(__mips_n32) || defined(__mips_n64)
#define	XCALLFRAME_A7		(3*SZREG)
#define	XCALLFRAME_A6		(2*SZREG)
#define	XCALLFRAME_A5		(1*SZREG)
#define	XCALLFRAME_A4		(0*SZREG)
#endif

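/*
 * _rtld_bind_start: lazy-binding trampoline.  The GOT stub jumps here
 * with t8 holding the symbol index, $15 (t7/t3) holding the caller's
 * return address, and gp still pointing into the calling object's GOT,
 * from which the Obj_Entry pointer is recovered below.
 */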
	.globl	_rtld_bind_start
	.ent	_rtld_bind_start
_rtld_bind_start:
	.frame	sp, XCALLFRAME_SIZ, $15
	move	v1, gp			/* save old GP */
#if defined(__mips_o32) || defined(__mips_o64)
	PTR_ADDU t9, 8			/* modify T9 to point at .cpload */
#endif
	SETUP_GP
	PTR_SUBU sp, XCALLFRAME_SIZ	/* save arguments and sp value on the stack */
	SETUP_GP64(XCALLFRAME_GP, _rtld_bind_start)
	SAVE_GP(XCALLFRAME_GP)
#if defined(__mips_n32) || defined(__mips_n64)
	REG_S	a4,  XCALLFRAME_A4(sp)
	REG_S	a5,  XCALLFRAME_A5(sp)
	REG_S	a6,  XCALLFRAME_A6(sp)
	REG_S	a7,  XCALLFRAME_A7(sp)
#endif
	REG_S	a0,  XCALLFRAME_A0(sp)
	REG_S	a1,  XCALLFRAME_A1(sp)
	REG_S	a2,  XCALLFRAME_A2(sp)
	REG_S	a3,  XCALLFRAME_A3(sp)
	REG_S	$15,  XCALLFRAME_RA(sp)	/* ra is in t7/t3 */
	REG_S	s0,  XCALLFRAME_S0(sp)
	move	s0, sp

	move	a0, v1			/* old GP */
	PTR_SUBU a0, a0, 0x7ff0		/* The offset of $gp from the	*/
					/* beginning of the .got section: */
					/* $gp = .got + 0x7ff0, so	*/
					/* .got = $gp - 0x7ff0		*/
#if defined(__mips_n64)
	ld	a0, 8(a0)		/* object = pltgot[1] & 0x7fffffffffffffff */
	and	a0, a0, 0x7fffffffffffffff
#else
	lw	a0, 4(a0)		/* object = pltgot[1] & 0x7fffffff */
	and	a0, a0, 0x7fffffff
#endif
	move	a1, t8			/* symbol index */

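	/*
	 * _mips_rtld_bind(obj, symidx) resolves the symbol, patches the
	 * GOT entry, and returns the target address, which we jump to
	 * after restoring the saved argument registers.
	 */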
	PTR_LA	t9, _C_LABEL(_mips_rtld_bind)
	jalr	t9
	nop

	move	sp, s0
	REG_L	ra, XCALLFRAME_RA(sp)
	REG_L	s0, XCALLFRAME_S0(sp)
	REG_L	a0, XCALLFRAME_A0(sp)
	REG_L	a1, XCALLFRAME_A1(sp)
	REG_L	a2, XCALLFRAME_A2(sp)
	REG_L	a3, XCALLFRAME_A3(sp)
#if defined(__mips_n32) || defined(__mips_n64)
	REG_L	a4, XCALLFRAME_A4(sp)
	REG_L	a5, XCALLFRAME_A5(sp)
	REG_L	a6, XCALLFRAME_A6(sp)
	REG_L	a7, XCALLFRAME_A7(sp)
#endif
	RESTORE_GP64
	PTR_ADDU sp, XCALLFRAME_SIZ
	move	t9, v0			/* resolved function address */
	jr	t9			/* tail-call the real function */
	nop
END(_rtld_bind_start)