/*	$NetBSD: cpufunc_asm_armv5_ec.S,v 1.1 2007/01/06 00:50:54 christos Exp $	*/

/*
 * Copyright (c) 2002, 2005 ARM Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the company may not be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * ARMv5 assembly functions for manipulating caches.
 * These routines can be used by any core that supports both the set/index
 * operations and the test and clean operations for efficiently cleaning the
 * entire DCache.  If a core does not have the test and clean operations, but
 * does have the set/index operations, use the routines in cpufunc_asm_armv5.S.
 * This source was derived from that file.
 */

#include <machine/asm.h>
__FBSDID("$FreeBSD$");
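
/*
 * For reference, these entry points are assumed to correspond to C
 * prototypes of roughly the following shape (the authoritative declarations
 * live in the platform's cpufunc.h and may differ):
 *
 *	void armv5_ec_setttb(u_int ttb);
 *	void armv5_ec_icache_sync_range(vm_offset_t va, vm_size_t len);
 *	void armv5_ec_dcache_wb_range(vm_offset_t va, vm_size_t len);
 *	void armv5_ec_dcache_wbinv_range(vm_offset_t va, vm_size_t len);
 *	void armv5_ec_dcache_inv_range(vm_offset_t va, vm_size_t len);
 *	void armv5_ec_idcache_wbinv_range(vm_offset_t va, vm_size_t len);
 *	void armv5_ec_dcache_wbinv_all(void);
 *	void armv5_ec_idcache_wbinv_all(void);
 */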

/*
 * Functions to set the MMU Translation Table Base register
 *
 * We need to clean and flush the cache as it uses virtual
 * addresses that are about to change.
 */
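/*
 * On entry, r0 holds the new TTB value (the physical address of the L1
 * translation table); it is written to CP15 c2 once the caches are clean.
 */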
ENTRY(armv5_ec_setttb)
	/*
	 * Some other ARM ports save registers on the stack, call the
	 * idcache_wbinv_all function and then restore the registers from the
	 * stack before setting the TTB.  I observed that this caused a
	 * problem when the old and new translation table entries' buffering
	 * bits were different.  If I saved the registers in other registers,
	 * or invalidated the caches on returning from idcache_wbinv_all, it
	 * worked fine.  If not, I ended up executing at an invalid PC.
	 * For armv5_ec_setttb, the idcache_wbinv_all sequence is simple
	 * enough that I just do it inline and avoid the problem entirely.
	 */
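	/*
	 * The test, clean and invalidate operation sets the Z flag once the
	 * whole D-cache is clean, so the loop below repeats until no dirty
	 * lines remain.
	 */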
	mcr	p15, 0, r0, c7, c5, 0	/* Invalidate ICache */
1:	mrc	p15, 0, APSR_nzcv, c7, c14, 3	/* Test, clean and invalidate DCache */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */

	mcr	p15, 0, r0, c2, c0, 0	/* load new TTB */

	mcr	p15, 0, r0, c8, c7, 0	/* invalidate I+D TLBs */
	RET
END(armv5_ec_setttb)

/*
 * Cache operations.  When an operation covers the entire cache we use the
 * enhanced (test and clean) cache operations.
 */
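
/*
 * Each of the range operations below takes the starting virtual address in
 * r0 and the length in bytes in r1.  A range of 0x4000 bytes (16KiB) or
 * more is handled by falling back to the corresponding whole-cache
 * operation instead of iterating line by line.
 */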

ENTRY_NP(armv5_ec_icache_sync_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_icache_sync_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
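	/*
	 * The prologue above rounds r0 down to a cache-line boundary and
	 * adjusts the byte count in r1 so that the loop below still covers
	 * the last byte of the original range without overrunning it.
	 */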
1:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET

.Larmv5_ec_icache_sync_all:
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so that we can safely flush the Icache and fall through
	 * into the Dcache cleaning code.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Flush I cache */
	/* Fall through to clean Dcache. */

.Larmv5_ec_dcache_wb:
1:
	mrc	p15, 0, APSR_nzcv, c7, c10, 3	/* Test and clean (don't invalidate) */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_icache_sync_range)
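
/*
 * Literal pool entry: the address of the variable holding the primary
 * D-cache line size, which is assumed to be filled in during CPU
 * identification at boot.
 */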
.Larmv5_ec_line_size:
	.word	_C_LABEL(arm_pdcache_line_size)

ENTRY(armv5_ec_dcache_wb_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wb
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c10, 1	/* Clean D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_wb_range)

ENTRY(armv5_ec_dcache_wbinv_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wbinv_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_wbinv_range)

/*
 * Note, we must not blindly invalidate everything: invalidating the whole
 * D-cache without writing it back would discard dirty lines that lie
 * outside the requested range.  If the range is too big we therefore fall
 * back to a write-back-and-invalidate of the entire cache.
 */
ENTRY(armv5_ec_dcache_inv_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_dcache_wbinv_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c6, 1	/* Invalidate D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_inv_range)

ENTRY(armv5_ec_idcache_wbinv_range)
	ldr	ip, .Larmv5_ec_line_size
	cmp	r1, #0x4000
	bcs	.Larmv5_ec_idcache_wbinv_all
	ldr	ip, [ip]
	sub	r1, r1, #1		/* Don't overrun */
	sub	r3, ip, #1
	and	r2, r0, r3
	add	r1, r1, r2
	bic	r0, r0, r3
1:
	mcr	p15, 0, r0, c7, c5, 1	/* Invalidate I cache SE with VA */
	mcr	p15, 0, r0, c7, c14, 1	/* Purge D cache SE with VA */
	add	r0, r0, ip
	subs	r1, r1, ip
	bpl	1b
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_idcache_wbinv_range)

ENTRY_NP(armv5_ec_idcache_wbinv_all)
.Larmv5_ec_idcache_wbinv_all:
	/*
	 * We assume that the code here can never be out of sync with the
	 * dcache, so that we can safely flush the Icache and fall through
	 * into the Dcache purging code.
	 */
	mcr	p15, 0, r0, c7, c5, 0	/* Invalidate ICache */
	/* Fall through to purge Dcache. */
END(armv5_ec_idcache_wbinv_all)
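/*
 * Note that the fall-through above relies on armv5_ec_dcache_wbinv_all
 * being assembled immediately after this point, so these two functions
 * must not be reordered.
 */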

ENTRY(armv5_ec_dcache_wbinv_all)
.Larmv5_ec_dcache_wbinv_all:
1:	mrc	p15, 0, APSR_nzcv, c7, c14, 3	/* Test, clean and invalidate DCache */
	bne	1b			/* More to do? */
	mcr	p15, 0, r0, c7, c10, 4	/* drain the write buffer */
	RET
END(armv5_ec_dcache_wbinv_all)