cpu-v6.h revision 276340
/*-
 * Copyright 2014 Svatopluk Kraus <onwahe@gmail.com>
 * Copyright 2014 Michal Meloun <meloun@miracle.cz>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm/include/cpu-v6.h 276340 2014-12-28 19:05:32Z ian $
 */
#ifndef MACHINE_CPU_V6_H
#define MACHINE_CPU_V6_H

#include "machine/atomic.h"
#include "machine/cpufunc.h"
#include "machine/cpuinfo.h"
#include "machine/sysreg.h"

#define CPU_ASID_KERNEL 0

/*
 * Macros to generate CP15 (system control coprocessor) read/write
 * functions; see the example expansion below the macro definitions.
 */
#define _FX(s...) #s

#define _RF0(fname, aname...)						\
static __inline register_t						\
fname(void)								\
{									\
	register_t reg;							\
	__asm __volatile("mrc\t" _FX(aname): "=r" (reg));		\
	return (reg);							\
}

#define _WF0(fname, aname...)						\
static __inline void							\
fname(void)								\
{									\
	__asm __volatile("mcr\t" _FX(aname));				\
}

#define _WF1(fname, aname...)						\
static __inline void							\
fname(register_t reg)							\
{									\
	__asm __volatile("mcr\t" _FX(aname):: "r" (reg));		\
}
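
/*
 * For example, _RF0(cp15_midr_get, CP15_MIDR(%0)) expands (given the
 * CP15_MIDR encoding from machine/sysreg.h) to roughly:
 *
 *	static __inline register_t
 *	cp15_midr_get(void)
 *	{
 *		register_t reg;
 *		__asm __volatile("mrc\tp15, 0, %0, c0, c0, 0" : "=r" (reg));
 *		return (reg);
 *	}
 */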

/*
 * Raw CP15 maintenance operations.
 * !!! not for external use !!!
 *
 * Operations suffixed with "IS" are the ARMv7 Inner Shareable variants,
 * which broadcast the request to the other cores in the inner shareable
 * domain; they are compiled in only for SMP kernels.
 */

/* TLB */

_WF0(_CP15_TLBIALL, CP15_TLBIALL)		/* Invalidate entire unified TLB */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_TLBIALLIS, CP15_TLBIALLIS)		/* Invalidate entire unified TLB IS */
#endif
_WF1(_CP15_TLBIASID, CP15_TLBIASID(%0))		/* Invalidate unified TLB by ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIASIDIS, CP15_TLBIASIDIS(%0))	/* Invalidate unified TLB by ASID IS */
#endif
_WF1(_CP15_TLBIMVAA, CP15_TLBIMVAA(%0))		/* Invalidate unified TLB by MVA, all ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIMVAAIS, CP15_TLBIMVAAIS(%0))	/* Invalidate unified TLB by MVA, all ASID IS */
#endif
_WF1(_CP15_TLBIMVA, CP15_TLBIMVA(%0))		/* Invalidate unified TLB by MVA */

_WF1(_CP15_TTB_SET, CP15_TTBR0(%0))

/* Cache and Branch predictor */

_WF0(_CP15_BPIALL, CP15_BPIALL)			/* Branch predictor invalidate all */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_BPIALLIS, CP15_BPIALLIS)		/* Branch predictor invalidate all IS */
#endif
_WF1(_CP15_BPIMVA, CP15_BPIMVA(%0))		/* Branch predictor invalidate by MVA */
_WF1(_CP15_DCCIMVAC, CP15_DCCIMVAC(%0))		/* Data cache clean and invalidate by MVA PoC */
_WF1(_CP15_DCCISW, CP15_DCCISW(%0))		/* Data cache clean and invalidate by set/way */
_WF1(_CP15_DCCMVAC, CP15_DCCMVAC(%0))		/* Data cache clean by MVA PoC */
#if __ARM_ARCH >= 7
_WF1(_CP15_DCCMVAU, CP15_DCCMVAU(%0))		/* Data cache clean by MVA PoU */
#endif
_WF1(_CP15_DCCSW, CP15_DCCSW(%0))		/* Data cache clean by set/way */
_WF1(_CP15_DCIMVAC, CP15_DCIMVAC(%0))		/* Data cache invalidate by MVA PoC */
_WF1(_CP15_DCISW, CP15_DCISW(%0))		/* Data cache invalidate by set/way */
_WF0(_CP15_ICIALLU, CP15_ICIALLU)		/* Instruction cache invalidate all PoU */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_ICIALLUIS, CP15_ICIALLUIS)		/* Instruction cache invalidate all PoU IS */
#endif
_WF1(_CP15_ICIMVAU, CP15_ICIMVAU(%0))		/* Instruction cache invalidate by MVA PoU */

/*
 * Publicly accessible functions
 */

/* Various control registers */

_RF0(cp15_dfsr_get, CP15_DFSR(%0))
_RF0(cp15_ifsr_get, CP15_IFSR(%0))
_WF1(cp15_prrr_set, CP15_PRRR(%0))
_WF1(cp15_nmrr_set, CP15_NMRR(%0))
_RF0(cp15_ttbr_get, CP15_TTBR0(%0))
_RF0(cp15_dfar_get, CP15_DFAR(%0))
#if __ARM_ARCH >= 7
_RF0(cp15_ifar_get, CP15_IFAR(%0))
#endif

/* CPU ID registers */
_RF0(cp15_midr_get, CP15_MIDR(%0))
_RF0(cp15_ctr_get, CP15_CTR(%0))
_RF0(cp15_tcmtr_get, CP15_TCMTR(%0))
_RF0(cp15_tlbtr_get, CP15_TLBTR(%0))
_RF0(cp15_mpidr_get, CP15_MPIDR(%0))
_RF0(cp15_revidr_get, CP15_REVIDR(%0))
_RF0(cp15_aidr_get, CP15_AIDR(%0))
_RF0(cp15_id_pfr0_get, CP15_ID_PFR0(%0))
_RF0(cp15_id_pfr1_get, CP15_ID_PFR1(%0))
_RF0(cp15_id_dfr0_get, CP15_ID_DFR0(%0))
_RF0(cp15_id_afr0_get, CP15_ID_AFR0(%0))
_RF0(cp15_id_mmfr0_get, CP15_ID_MMFR0(%0))
_RF0(cp15_id_mmfr1_get, CP15_ID_MMFR1(%0))
_RF0(cp15_id_mmfr2_get, CP15_ID_MMFR2(%0))
_RF0(cp15_id_mmfr3_get, CP15_ID_MMFR3(%0))
_RF0(cp15_id_isar0_get, CP15_ID_ISAR0(%0))
_RF0(cp15_id_isar1_get, CP15_ID_ISAR1(%0))
_RF0(cp15_id_isar2_get, CP15_ID_ISAR2(%0))
_RF0(cp15_id_isar3_get, CP15_ID_ISAR3(%0))
_RF0(cp15_id_isar4_get, CP15_ID_ISAR4(%0))
_RF0(cp15_id_isar5_get, CP15_ID_ISAR5(%0))
_RF0(cp15_cbar_get, CP15_CBAR(%0))
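
/*
 * Example (hypothetical caller): identify the CPU implementer from the
 * Main ID Register:
 *
 *	uint32_t midr = cp15_midr_get();
 *	uint32_t impl = (midr >> 24) & 0xff;
 *
 * An implementer value of 0x41 ('A') denotes ARM Ltd.
 */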

#undef	_FX
#undef	_RF0
#undef	_WF0
#undef	_WF1

/*
 * TLB maintenance operations.
 */

/* Local (i.e. non-broadcasting) operations. */

/* Flush all TLB entries (even global). */
static __inline void
tlb_flush_all_local(void)
{

	dsb();
	_CP15_TLBIALL();
	dsb();
}

/* Flush all non-global TLB entries. */
static __inline void
tlb_flush_all_ng_local(void)
{

	dsb();
	_CP15_TLBIASID(CPU_ASID_KERNEL);
	dsb();
}

/* Flush a single TLB entry (even global). */
static __inline void
tlb_flush_local(vm_offset_t sva)
{

	dsb();
	_CP15_TLBIMVA((sva & ~PAGE_MASK) | CPU_ASID_KERNEL);
	dsb();
}

/* Flush a range of TLB entries (even global). */
static __inline void
tlb_flush_range_local(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVA((va & ~PAGE_MASK) | CPU_ASID_KERNEL);
	dsb();
}

/* Broadcasting operations. */
#ifndef SMP

#define tlb_flush_all() 		tlb_flush_all_local()
#define tlb_flush_all_ng() 		tlb_flush_all_ng_local()
#define tlb_flush(sva) 			tlb_flush_local(sva)
#define tlb_flush_range(sva, size) 	tlb_flush_range_local(sva, size)

#else /* SMP */

static __inline void
tlb_flush_all(void)
{

	dsb();
	_CP15_TLBIALLIS();
	dsb();
}

static __inline void
tlb_flush_all_ng(void)
{

	dsb();
	_CP15_TLBIASIDIS(CPU_ASID_KERNEL);
	dsb();
}

static __inline void
tlb_flush(vm_offset_t sva)
{

	dsb();
	_CP15_TLBIMVAAIS(sva);
	dsb();
}

static __inline void
tlb_flush_range(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVAAIS(va);
	dsb();
}
#endif /* SMP */
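
/*
 * A typical (hypothetical) caller is the pmap layer after changing a
 * kernel mapping; the flush ensures no core keeps a stale translation:
 *
 *	*ptep = new_pte;		(update the page table entry)
 *	tlb_flush(va);			(broadcast on SMP, local otherwise)
 */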

/*
 * Cache maintenance operations.
 */

/* Sync I and D caches to PoU */
static __inline void
icache_sync(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += arm_dcache_align) {
#ifdef SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
#ifdef SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}
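
/*
 * A typical (hypothetical) use is after storing instructions to memory,
 * e.g. installing a trampoline or applying a code patch, so that
 * subsequent instruction fetches see the new code:
 *
 *	memcpy(dst, code, len);
 *	icache_sync((vm_offset_t)dst, len);
 */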

/* Invalidate I cache */
static __inline void
icache_inv_all(void)
{
#ifdef SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}

/* Write back D-cache to PoU */
static __inline void
dcache_wb_pou(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += arm_dcache_align) {
#ifdef SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
}

/* Invalidate D-cache to PoC */
static __inline void
dcache_inv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	/* invalidate L1 first */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCIMVAC(va);
	}
	dsb();

	/* then L2 */
	cpu_l2cache_inv_range(pa, size);
	dsb();

	/* then L1 again, in case it was refilled from stale L2 data meanwhile */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}
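
/*
 * Hypothetical DMA-receive sketch: before the CPU reads a buffer that a
 * device has written, invalidate it to PoC so stale cache lines are not
 * seen instead of the DMA'd data:
 *
 *	dcache_inv_poc(buf_va, buf_pa, buf_len);
 *	(... read the buffer ...)
 */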

/* Write back D-cache to PoC */
static __inline void
dcache_wb_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	cpu_l2cache_wb_range(pa, size);
}

/* Write back and invalidate D-cache to PoC */
static __inline void
dcache_wbinv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	/* write back L1 first */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	/* then write back and invalidate L2 */
	cpu_l2cache_wbinv_range(pa, size);

	/* then invalidate L1 */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}
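
/*
 * Hypothetical DMA-transmit sketch: after the CPU fills a buffer that a
 * device will read, write it back to PoC so the device sees the data
 * rather than whatever happens to be in main memory:
 *
 *	fill_buffer(buf_va, buf_len);
 *	dcache_wb_poc(buf_va, buf_pa, buf_len);
 */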

/*
 * Set the TTB0 register (translation table base) and discard stale
 * branch predictor and non-global TLB state belonging to the old tables.
 */
static __inline void
cp15_ttbr_set(uint32_t reg)
{
	dsb();
	_CP15_TTB_SET(reg);
	dsb();
	_CP15_BPIALL();
	dsb();
	isb();
	tlb_flush_all_ng_local();
}
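
/*
 * A hypothetical caller is a context switch path activating another
 * address space's translation tables:
 *
 *	cp15_ttbr_set(vtophys(new_l1_table));
 */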

#endif /* !MACHINE_CPU_V6_H */