/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/offset.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/stackframe.h>

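/*
 * EX() wraps a single store in an exception table entry: the address of
 * the instruction (local label 9) and its fixup routine are recorded in
 * __ex_table, so a fault on the access makes the exception handler
 * continue at "handler" instead of oopsing.
 */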
#define EX(insn,reg,addr,handler)			\
9:	insn	reg, addr;				\
	.section __ex_table,"a";			\
	PTR	9b, handler;				\
	.previous

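/*
 * F_FILL64 clears one 64-byte block with eight doubleword stores, each
 * individually covered by an __ex_table entry.
 */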
#define F_FILL64(dst, offset, val, fixup)		\
	EX(sd, val, (offset + 0x00)(dst), fixup);	\
	EX(sd, val, (offset + 0x08)(dst), fixup);	\
	EX(sd, val, (offset + 0x10)(dst), fixup);	\
	EX(sd, val, (offset + 0x18)(dst), fixup);	\
	EX(sd, val, (offset + 0x20)(dst), fixup);	\
	EX(sd, val, (offset + 0x28)(dst), fixup);	\
	EX(sd, val, (offset + 0x30)(dst), fixup);	\
	EX(sd, val, (offset + 0x38)(dst), fixup)

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 */
	.set	noreorder
	.align	5
LEAF(memset)
	beqz	a1, 1f
	 move	v0, a0				/* result */

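	/*
	 * Replicate the fill byte across all eight byte lanes of a1,
	 * e.g. 0x000000ab becomes 0xabababababababab, so each sd below
	 * stores eight copies of it at once.
	 */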
	andi	a1, 0xff			/* spread fillword */
	dsll	t1, a1, 8
	or	a1, t1
	dsll	t1, a1, 16
	or	a1, t1
	dsll	t1, a1, 32
	or	a1, t1

1:

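/*
 * __bzero is entered both from memset (with a1 already replicated) and
 * directly by callers such as clear_user, which rely on the fixups
 * below to report in a2 how many bytes were left unwritten.
 */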
FEXPORT(__bzero)
	sltiu	t0, a2, 8			/* very small region? */
	bnez	t0, small_memset
	 andi	t0, a0, 7			/* aligned? */

	beqz	t0, 1f
	 dsubu	t0, 8				/* alignment in bytes */

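	/*
	 * Store a partial doubleword to fill from a0 up to the next
	 * 8-byte boundary: sdl on big-endian and sdr on little-endian
	 * write only the bytes between the given address and the end of
	 * its aligned doubleword.
	 */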
#ifdef __MIPSEB__
	EX(sdl, a1, (a0), first_fixup)		/* make dword aligned */
#endif
#ifdef __MIPSEL__
	EX(sdr, a1, (a0), first_fixup)		/* make dword aligned */
#endif
	dsubu	a0, t0				/* dword align ptr */
	daddu	a2, t0				/* correct size */

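	/*
	 * t1 = a2 rounded down to a multiple of 64: the number of bytes
	 * covered by full 64-byte blocks.  t0 keeps the leftover whole
	 * doublewords (a2 & 0x38) for the partial pass below.
	 */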
1:	ori	t1, a2, 0x3f			/* bytes in full blocks */
	xori	t1, 0x3f
	beqz	t1, memset_partial		/* no block to fill */
	 andi	t0, a2, 0x38

	daddu	t1, a0				/* end address */
	.set	reorder
1:	daddiu	a0, 64
	F_FILL64(a0, -64, a1, fwd_fixup)
	bne	t1, a0, 1b
	.set	noreorder

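/*
 * Clear the remaining 0..7 doublewords by jumping into the middle of an
 * unrolled F_FILL64: each EX() store expands to a single 4-byte
 * instruction, so backing off from label 2 by t0/2 bytes (four bytes
 * per doubleword still to store, t0 being that count times 8) executes
 * exactly the stores that are needed.
 */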
memset_partial:
	PTR_LA	t1, 2f				/* where to start */
	.set	noat
	dsrl	AT, t0, 1
	dsubu	t1, AT
	.set	at
	jr	t1
	 daddu	a0, t0				/* dest ptr */

	.set	push
	.set	noreorder
	.set	nomacro
	F_FILL64(a0, -64, a1, partial_fixup)	/* ... but first do dwds ... */
2:	.set	pop
	andi	a2, 7				/* 0 <= n <= 7 to go */

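	/*
	 * If any bytes remain (at most 7), finish with one unaligned
	 * store that writes the last a2 bytes of the area: sdr on
	 * big-endian, sdl on little-endian.
	 */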
	beqz	a2, 1f
	 daddu	a0, a2				/* What's left */
#ifdef __MIPSEB__
	EX(sdr, a1, -1(a0), last_fixup)
#endif
#ifdef __MIPSEL__
	EX(sdl, a1, -1(a0), last_fixup)
#endif
1:	jr	ra
	 move	a2, zero

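/*
 * Regions under 8 bytes are filled bytewise; the store is covered by an
 * __ex_table entry so faults here are reported like any other.
 */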
small_memset:
	beqz	a2, 2f
	 daddu	t1, a0, a2

1:	daddiu	a0, 1				/* fill bytewise */
	bne	t1, a0, 1b
	 EX(sb, a1, -1(a0), small_fixup)

2:	jr	ra				/* done */
	 move	a2, zero
	END(memset)

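/*
 * Fault fixups.  The exception handler stashes the faulting address in
 * the thread struct (THREAD_BUADDR, reached via the current task
 * pointer in $28); each fixup returns with a2 holding the number of
 * bytes that were not written.
 */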
first_fixup:
	jr	ra				/* a2 still holds the full count */
	 nop

fwd_fixup:
	ld	t0, THREAD_BUADDR($28)
	andi	a2, 0x3f
	daddu	a2, t1
	jr	ra
	 dsubu	a2, t0

partial_fixup:
	ld	t0, THREAD_BUADDR($28)
	andi	a2, 7
	daddu	a2, a0				/* a0, not t1: t1 is a code address here */
	jr	ra
	 dsubu	a2, t0

last_fixup:
	jr	ra
	 nop					/* a2 was already masked to the remainder */

small_fixup:
	dsubu	a2, t1, a0			/* a0 was advanced past the faulting byte */
	jr	ra
	 daddiu	a2, 1