/*	$NetBSD: cpufunc_asm_armv5_ec.S,v 1.1 2007/01/06 00:50:54 christos Exp $	*/

/*
 * Copyright (c) 2002, 2005 ARM Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the company may not be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * ARMv5 assembly functions for manipulating caches.
 * These routines can be used by any core that supports both the set/index
 * operations and the test and clean operations for efficiently cleaning the
 * entire DCache.  If a core does not have the test and clean operations, but
 * does have the set/index operations, use the routines in cpufunc_asm_armv5.S.
 * This source was derived from that file.
 */

#include <machine/asm.h>
__FBSDID("$FreeBSD$");

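/*
 * The whole-cache cleans below rely on the "test and clean" (and "test,
 * clean and invalidate") operations: each MRC writes back one dirty line
 * (if any) and updates the condition flags, with the Z flag set once the
 * DCache holds no more dirty lines.  The entire DCache can therefore be
 * drained with a two-instruction loop of the form used throughout this
 * file (shown here only as a reminder):
 *
 *	1:	mrc	p15, 0, APSR_nzcv, c7, c14, 3
 *		bne	1b
 */
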
/*
 * Functions to set the MMU Translation Table Base register
 *
 * We need to clean and flush the cache as it uses virtual
 * addresses that are about to change.
 */
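/*
 * A minimal C-level sketch of how this routine is expected to be called;
 * the prototype is an assumption based on the usual cpufunc glue, and
 * new_l1_pa is an illustrative name for the physical address of the new
 * L1 translation table passed in r0:
 *
 *	void	armv5_ec_setttb(u_int ttb);
 *
 *	armv5_ec_setttb(new_l1_pa);
 */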
ENTRY(armv5_ec_setttb)
	/*
	 * Some other ARM ports save registers on the stack, call the
	 * idcache_wbinv_all function and then restore the registers from the
	 * stack before setting the TTB.  I observed that this caused a
	 * problem when the old and new translation table entries' buffering
	 * bits were different.  If I saved the registers in other registers
	 * or invalidated the caches when I returned from idcache_wbinv_all,
	 * it worked fine.  If not, I ended up executing at an invalid PC.
	 * For armv5_ec_setttb, the idcache_wbinv_all is simple enough that I
	 * just do it directly and avoid the problem entirely.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Invalidate ICache */
1:	mrc	p15, 0, APSR_nzcv, c7, c14, 3	/* Test, clean and invalidate DCache */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */

	mcr	p15, 0, r0, c2, c0, 0	/* load new TTB */

	mcr	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
	RET
END(armv5_ec_setttb)

/*
 * Cache operations.  For whole-cache operations we use the enhanced
 * (test and clean) cache operations.
 */
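
/*
 * All of the ranged operations below share the same prologue.  A rough
 * C-level sketch of the register arithmetic, assuming linesize holds
 * arm_pdcache_line_size (the variable names here are illustrative only):
 *
 *	len = len - 1 + (va & (linesize - 1));	// bias the count so a
 *						// partial trailing line is
 *						// still covered
 *	va &= ~(linesize - 1);			// align start down to a line
 *	do {
 *		cache_op(va);			// one MCR per cache line
 *		va += linesize;
 *		len -= linesize;
 *	} while ((int)len >= 0);		// "bpl": loop while >= 0
 *
 * Ranges of 0x4000 bytes or more skip this and fall back to the
 * corresponding whole-cache operation, on the assumption that this is
 * cheaper than walking such a large range line by line.
 */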

ENTRY_NP(armv5_ec_icache_sync_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_icache_sync_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET

.Larmv5_ec_icache_sync_all:
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so that we can safely flush the Icache and fall through
	 * into the Dcache cleaning code.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Flush I cache */
	/* Fall through to clean Dcache. */

.Larmv5_ec_dcache_wb:
1:
	mrc	p15, 0, APSR_nzcv, c7, c10, 3	/* Test and clean (don't invalidate) */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_icache_sync_range)

.Larmv5_ec_line_size:
	.word	_C_LABEL(arm_pdcache_line_size)

ENTRY(armv5_ec_dcache_wb_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wb
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_wb_range)

ENTRY(armv5_ec_dcache_wbinv_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wbinv_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_wbinv_range)

/*
 * Note: we must not simply invalidate the whole cache here, as that would
 * discard dirty data outside the requested range.  If the range is too big
 * we therefore fall back to wb-inv of the entire cache.
 */
ENTRY(armv5_ec_dcache_inv_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wbinv_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c6, 1	/* Invalidate D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_inv_range)

ENTRY(armv5_ec_idcache_wbinv_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_idcache_wbinv_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_idcache_wbinv_range)

ENTRY_NP(armv5_ec_idcache_wbinv_all)
.Larmv5_ec_idcache_wbinv_all:
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so that we can safely flush the Icache and fall through
	 * into the Dcache purging code.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Invalidate ICache */
	/* Fall through to purge Dcache. */
END(armv5_ec_idcache_wbinv_all)

ENTRY(armv5_ec_dcache_wbinv_all)
.Larmv5_ec_dcache_wbinv_all:
1:	mrc	p15, 0, APSR_nzcv, c7, c14, 3	/* Test, clean and invalidate DCache */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_wbinv_all)
