1/*	$NetBSD: cpufunc_asm_armv5_ec.S,v 1.1 2007/01/06 00:50:54 christos Exp $	*/
2
3/*
4 * Copyright (c) 2002, 2005 ARM Limited
5 * All rights reserved.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 *    notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 *    notice, this list of conditions and the following disclaimer in the
14 *    documentation and/or other materials provided with the distribution.
15 * 3. The name of the company may not be used to endorse or promote
16 *    products derived from this software without specific prior written
17 *    permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
20 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
21 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
22 * IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
23 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
26 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
27 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
28 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
29 * SUCH DAMAGE.
30 *
31 * ARMv5 assembly functions for manipulating caches.
32 * These routines can be used by any core that supports both the set/index
33 * operations and the test and clean operations for efficiently cleaning the
34 * entire DCache.  If a core does not have the test and clean operations, but
35 * does have the set/index operations, use the routines in cpufunc_asm_armv5.S.
36 * This source was derived from that file.
37 */
38
39#include <machine/asm.h>
40__FBSDID("$FreeBSD$");
41
/*
 * Functions to set the MMU Translation Table Base register
 *
 * We need to clean and flush the cache as it uses virtual
 * addresses that are about to change.
 *
 * void armv5_ec_setttb(u_int ttb)
 * In:  r0 = new translation table base (value written verbatim to
 *      CP15 c2; caller is responsible for correct alignment).
 * Out: nothing.  Caches are cleaned/invalidated and both TLBs flushed
 *      as a side effect.
 */
ENTRY(armv5_ec_setttb)
	/*
	 * Some other ARM ports save registers on the stack, call the
	 * idcache_wbinv_all function and then restore the registers from the
	 * stack before setting the TTB.  I observed that this caused a
	 * problem when the old and new translation table entries' buffering
	 * bits were different.  If I saved the registers in other registers
	 * or invalidated the caches when I returned from idcache_wbinv_all,
	 * it worked fine.  If not, I ended up executing at an invalid PC.
	 * For armv5_ec_setttb, the idcache_wbinv_all is simple enough, I just
	 * do it directly and entirely avoid the problem.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Invalidate ICache */
	/*
	 * MRC with Rd=r15 routes the coprocessor result into the CPSR
	 * flags: the "test, clean and invalidate" op clears Z while dirty
	 * lines remain, so we loop until the whole DCache is clean.
	 */
1:	mrc	p15, 0, r15, c7, c14, 3	/* Test, clean and invalidate DCache */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */

	mcr	p15, 0, r0, c2, c0, 0	/* load new TTB */

	mcr	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
	RET
69
70/*
71 * Cache operations.  For the entire cache we use the enhanced cache
72 * operations.
73 */
74
/*
 * void armv5_ec_icache_sync_range(vm_offset_t va, vm_size_t len)
 * In:  r0 = start virtual address, r1 = length in bytes.
 * Cleans the DCache and invalidates the ICache line-by-line over the
 * range.  Ranges >= 0x4000 bytes fall through to the whole-cache
 * variant instead, which is cheaper than walking that many lines.
 * Clobbers: r0-r3, ip, flags.
 */
ENTRY_NP(armv5_ec_icache_sync_range)
	ldr	ip, .Larmv5_ec_line_size	/* ip = &arm_pdcache_line_size (value loaded below) */
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_icache_sync_all	/* big range: do the whole cache */
	ldr	ip, [ip]		/* ip = cache line size in bytes */
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1		/* r3 = line-size mask */
	and	r2, r0, r3		/* r2 = offset of va within its line */
	add	r1, r1, r2		/* extend length to cover partial first line */
	bic	r0, r0, r3		/* align va down to a line boundary */
1:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip		/* next line */
	subs	r1, r1, ip		/* bytes remaining; N set when done */
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
93
/*
 * void armv5_ec_icache_sync_all(void)
 * Invalidates the entire ICache, then cleans (writes back, without
 * invalidating) the entire DCache via the test-and-clean operation.
 * No arguments; clobbers flags only (r0 is passed to MCRs whose
 * opcodes ignore the register value).
 */
ENTRY_NP(armv5_ec_icache_sync_all)
.Larmv5_ec_icache_sync_all:
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so that we can safely flush the Icache and fall through
	 * into the Dcache cleaning code.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Flush I cache */
	/* Fall through to clean Dcache. */

.Larmv5_ec_dcache_wb:
	/*
	 * "Test and clean": MRC with Rd=r15 puts the result in the CPSR
	 * flags; Z stays clear while dirty lines remain, so loop until
	 * the DCache is fully written back.
	 */
1:
	mrc	p15, 0, r15, c7, c10, 3	/* Test and clean (don't invalidate) */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET

	/* Literal pool: address of the kernel's DCache line-size variable,
	 * shared by all the _range routines below. */
.Larmv5_ec_line_size:
	.word	_C_LABEL(arm_pdcache_line_size)
113
/*
 * void armv5_ec_dcache_wb_range(vm_offset_t va, vm_size_t len)
 * In:  r0 = start virtual address, r1 = length in bytes.
 * Cleans (writes back, without invalidating) each DCache line in the
 * range.  Ranges >= 0x4000 bytes use the whole-cache test-and-clean
 * loop instead.  Clobbers: r0-r3, ip, flags.
 */
ENTRY(armv5_ec_dcache_wb_range)
	ldr	ip, .Larmv5_ec_line_size	/* ip = &arm_pdcache_line_size */
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wb	/* big range: clean the whole cache */
	ldr	ip, [ip]		/* ip = cache line size in bytes */
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1		/* r3 = line-size mask */
	and	r2, r0, r3		/* r2 = offset of va within its line */
	add	r1, r1, r2		/* extend length to cover partial first line */
	bic	r0, r0, r3		/* align va down to a line boundary */
1:
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip		/* next line */
	subs	r1, r1, ip		/* bytes remaining; N set when done */
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
131
/*
 * void armv5_ec_dcache_wbinv_range(vm_offset_t va, vm_size_t len)
 * In:  r0 = start virtual address, r1 = length in bytes.
 * Cleans and invalidates (purges) each DCache line in the range.
 * Ranges >= 0x4000 bytes fall back to purging the entire DCache.
 * Clobbers: r0-r3, ip, flags.
 */
ENTRY(armv5_ec_dcache_wbinv_range)
	ldr	ip, .Larmv5_ec_line_size	/* ip = &arm_pdcache_line_size */
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wbinv_all	/* big range: purge whole cache */
	ldr	ip, [ip]		/* ip = cache line size in bytes */
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1		/* r3 = line-size mask */
	and	r2, r0, r3		/* r2 = offset of va within its line */
	add	r1, r1, r2		/* extend length to cover partial first line */
	bic	r0, r0, r3		/* align va down to a line boundary */
1:
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip		/* next line */
	subs	r1, r1, ip		/* bytes remaining; N set when done */
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
149
/*
 * Note, we must not invalidate everything.  If the range is too big we
 * must use wb-inv of the entire cache.
 *
 * void armv5_ec_dcache_inv_range(vm_offset_t va, vm_size_t len)
 * In:  r0 = start virtual address, r1 = length in bytes.
 * Invalidates (discards, without write-back) each DCache line in the
 * range.  Because invalidating the whole cache would throw away
 * unrelated dirty data, ranges >= 0x4000 bytes instead do a full
 * write-back-and-invalidate.  NOTE(review): the partial first/last
 * lines of the range are invalidated whole here, discarding any
 * neighbouring dirty data sharing those lines — callers are expected
 * to pass line-aligned ranges.  Clobbers: r0-r3, ip, flags.
 */
ENTRY(armv5_ec_dcache_inv_range)
	ldr	ip, .Larmv5_ec_line_size	/* ip = &arm_pdcache_line_size */
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wbinv_all	/* big range: wb-inv whole cache */
	ldr	ip, [ip]		/* ip = cache line size in bytes */
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1		/* r3 = line-size mask */
	and	r2, r0, r3		/* r2 = offset of va within its line */
	add	r1, r1, r2		/* extend length to cover partial first line */
	bic	r0, r0, r3		/* align va down to a line boundary */
1:
	mcr	p15, 0, r0, c7, c6, 1	/* Invalidate D cache SE with VA */
	add	r0, r0, ip		/* next line */
	subs	r1, r1, ip		/* bytes remaining; N set when done */
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
171
/*
 * void armv5_ec_idcache_wbinv_range(vm_offset_t va, vm_size_t len)
 * In:  r0 = start virtual address, r1 = length in bytes.
 * Per line: invalidates the ICache entry and purges (cleans +
 * invalidates) the DCache entry.  Ranges >= 0x4000 bytes fall back
 * to the whole-cache I+D variant.  Clobbers: r0-r3, ip, flags.
 */
ENTRY(armv5_ec_idcache_wbinv_range)
	ldr	ip, .Larmv5_ec_line_size	/* ip = &arm_pdcache_line_size */
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_idcache_wbinv_all	/* big range: whole I+D cache */
	ldr	ip, [ip]		/* ip = cache line size in bytes */
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1		/* r3 = line-size mask */
	and	r2, r0, r3		/* r2 = offset of va within its line */
	add	r1, r1, r2		/* extend length to cover partial first line */
	bic	r0, r0, r3		/* align va down to a line boundary */
1:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip		/* next line */
	subs	r1, r1, ip		/* bytes remaining; N set when done */
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
190
/*
 * void armv5_ec_idcache_wbinv_all(void)
 * void armv5_ec_dcache_wbinv_all(void)
 * The first invalidates the whole ICache, then falls through into the
 * second, which writes back and invalidates the whole DCache using
 * the ARMv5E "test, clean and invalidate" operation.  No arguments;
 * clobbers flags.
 */
ENTRY_NP(armv5_ec_idcache_wbinv_all)
.Larmv5_ec_idcache_wbinv_all:
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so that we can safely flush the Icache and fall through
	 * into the Dcache purging code.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Invalidate ICache */
	/* Fall through to purge Dcache. */

ENTRY(armv5_ec_dcache_wbinv_all)
.Larmv5_ec_dcache_wbinv_all:
	/*
	 * MRC with Rd=r15 routes the result into the CPSR flags; Z stays
	 * clear while dirty lines remain, so loop until done.
	 */
1:	mrc	p15, 0, r15, c7, c14, 3	/* Test, clean and invalidate DCache */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
207
208