1/*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License.  See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 1998 by Ralf Baechle
7 */
8#include <asm/asm.h>
9#include <asm/offset.h>
10#include <asm/regdef.h>
11
/*
 * EX(insn, reg, addr, handler): emit a single memory access
 * "insn reg, addr" and record an __ex_table entry mapping its address
 * to "handler", so a fault taken in that instruction resumes at the
 * handler.  Only the labelled instruction itself lands in .text (the
 * table entry goes to the __ex_table section), i.e. each EX() expands
 * to exactly one 4-byte instruction -- the computed jump in
 * memset_partial depends on that.
 */
#define EX(insn,reg,addr,handler)			\
9:	insn	reg, addr;				\
	.section __ex_table,"a"; 			\
	PTR	9b, handler; 				\
	.previous
17
/*
 * F_FILL64(dst, offset, val, fixup): store the 32-bit fill word "val"
 * to the 64 bytes at dst+offset .. dst+offset+0x3c with sixteen word
 * stores, each fault-protected by EX() using the given fixup handler.
 * Every store is one 4-byte instruction storing 4 bytes, so within the
 * expansion the code offset equals the data offset; memset_partial
 * exploits this by jumping into the middle of an expansion to perform
 * an arbitrary multiple-of-4-byte fill.
 */
#define F_FILL64(dst, offset, val, fixup)		\
	EX(sw, val, (offset + 0x00)(dst), fixup);	\
	EX(sw, val, (offset + 0x04)(dst), fixup);	\
	EX(sw, val, (offset + 0x08)(dst), fixup);	\
	EX(sw, val, (offset + 0x0c)(dst), fixup);	\
	EX(sw, val, (offset + 0x10)(dst), fixup);	\
	EX(sw, val, (offset + 0x14)(dst), fixup);	\
	EX(sw, val, (offset + 0x18)(dst), fixup);	\
	EX(sw, val, (offset + 0x1c)(dst), fixup);	\
	EX(sw, val, (offset + 0x20)(dst), fixup);	\
	EX(sw, val, (offset + 0x24)(dst), fixup);	\
	EX(sw, val, (offset + 0x28)(dst), fixup);	\
	EX(sw, val, (offset + 0x2c)(dst), fixup);	\
	EX(sw, val, (offset + 0x30)(dst), fixup);	\
	EX(sw, val, (offset + 0x34)(dst), fixup);	\
	EX(sw, val, (offset + 0x38)(dst), fixup);	\
	EX(sw, val, (offset + 0x3c)(dst), fixup)
35
36/*
37 * memset(void *s, int c, size_t n)
38 *
39 * a0: start of area to clear
40 * a1: char to fill with
41 * a2: size of area to clear
42 */
43	.set	noreorder
44	.align	5
45LEAF(memset)
46	beqz	a1, 1f
47	 move	v0, a0				/* result */
48
49	andi	a1, 0xff			/* spread fillword */
50	sll	t1, a1, 8
51	or	a1, t1
52	sll	t1, a1, 16
53	or	a1, t1
541:
55
56EXPORT(__bzero)
57	sltiu	t0, a2, 4			/* very small region? */
58	bnez	t0, small_memset
59	 andi	t0, a0, 3			/* aligned? */
60
61	beqz	t0, 1f
62	 subu	t0, 4				/* alignment in bytes */
63
64#ifdef __MIPSEB__
65	EX(swl, a1, (a0), first_fixup)		/* make word aligned */
66#endif
67#ifdef __MIPSEL__
68	EX(swr, a1, (a0), first_fixup)		/* make word aligned */
69#endif
70	subu	a0, t0				/* word align ptr */
71	addu	a2, t0				/* correct size */
72
731:	ori	t1, a2, 0x3f			/* # of full blocks */
74	xori	t1, 0x3f
75	beqz	t1, memset_partial		/* no block to fill */
76	 andi	t0, a2, 0x3c
77
78	addu	t1, a0				/* end address */
79	.set	reorder
801:	addiu	a0, 64
81	F_FILL64(a0, -64, a1, fwd_fixup)
82	bne	t1, a0, 1b
83	.set	noreorder
84
85memset_partial:
86	PTR_LA	t1, 2f				/* where to start */
87	subu	t1, t0
88	jr	t1
89	 addu	a0, t0				/* dest ptr */
90
91	.set	push
92	.set	noreorder
93	.set	nomacro
94	F_FILL64(a0, -64, a1, partial_fixup)	/* ... but first do wrds ... */
952:	.set	pop
96	andi	a2, 3				/* 0 <= n <= 3 to go */
97
98	beqz	a2, 1f
99	 addu	a0, a2				/* What's left */
100#ifdef __MIPSEB__
101	EX(swr, a1, -1(a0), last_fixup)
102#endif
103#ifdef __MIPSEL__
104	EX(swl, a1, -1(a0), last_fixup)
105#endif
1061:	jr	ra
107	 move	a2, zero
108
109small_memset:
110	beqz	a2, 2f
111	 addu	t1, a0, a2
112
1131:	addiu	a0, 1				/* fill bytewise */
114	bne	t1, a0, 1b
115	 sb	a1, -1(a0)
116
1172:	jr	ra				/* done */
118	 move	a2, zero
119	END(memset)
120
/*
 * Fault on the initial unaligned swl/swr.  Nothing has been stored yet
 * and a2 has not been adjusted for the alignment bytes, so it already
 * holds the full remaining count -- just return.
 */
first_fixup:
	jr	ra
	 nop
124
/*
 * Fault inside the 64-byte-block fill loop.  THREAD_BUADDR($28) is the
 * faulting address saved by the fault handler ($28/gp presumably holds
 * the current thread pointer here -- offsets come from asm/offset.h).
 * Bytes left = the tail never attempted (a2 & 0x3f) plus the part of
 * the block area, which ends at t1, lying at or past the fault address.
 */
fwd_fixup:
	lw	t0, THREAD_BUADDR($28)
	andi	a2, 0x3f
	addu	a2, t1
	jr	ra
	 subu	a2, t0
131
/*
 * Fault inside the partial word stores reached via memset_partial.
 * THREAD_BUADDR($28) is the faulting address saved by the fault
 * handler.  By this point a0 has already been advanced to the end of
 * the word-store area (bumped in memset_partial's jr delay slot), so
 * bytes left = (a2 & 3) tail bytes + (a0 - fault address).
 *
 * Fix vs. original: it added t1, but t1 here holds the *code* address
 * computed for the jump into the F_FILL64 expansion (2f - t0), not the
 * end of the data area, so the returned count was garbage; the same
 * bug was later fixed in mainline Linux by using a0 instead.
 */
partial_fixup:
	lw	t0, THREAD_BUADDR($28)
	andi	a2, 3
	addu	a2, a0
	jr	ra
	 subu	a2, t0
138
/*
 * Fault on the final unaligned swr/swl that stores the trailing 1-3
 * bytes.  a2 was masked to (n & 3) just before that store and none of
 * the tail was written, so a2 already holds the bytes left -- return
 * as-is.  The original "andi v1, a2, 3" here computed the same value
 * into v1, which nothing reads; a plain nop fills the delay slot
 * (mainline Linux made the same change).
 */
last_fixup:
	jr	ra
	 nop
142