cpu-v6.h revision 276803
/*-
 * Copyright 2014 Svatopluk Kraus <onwahe@gmail.com>
 * Copyright 2014 Michal Meloun <meloun@miracle.cz>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm/include/cpu-v6.h 276803 2015-01-08 01:28:46Z ian $
 */
#ifndef MACHINE_CPU_V6_H
#define MACHINE_CPU_V6_H

#include "machine/atomic.h"
#include "machine/cpufunc.h"
#include "machine/cpuinfo.h"
#include "machine/sysreg.h"


#define CPU_ASID_KERNEL 0

/*
 * Macros to generate CP15 (system control processor) read/write functions.
 */
#define _FX(s...) #s

#define _RF0(fname, aname...)						\
static __inline register_t						\
fname(void)								\
{									\
	register_t reg;							\
	__asm __volatile("mrc\t" _FX(aname): "=r" (reg));		\
	return(reg);							\
}

#define _WF0(fname, aname...)						\
static __inline void							\
fname(void)								\
{									\
	__asm __volatile("mcr\t" _FX(aname));				\
}

#define _WF1(fname, aname...)						\
static __inline void							\
fname(register_t reg)							\
{									\
	__asm __volatile("mcr\t" _FX(aname):: "r" (reg));		\
}
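
/*
 * Example (illustrative only): assuming machine/sysreg.h defines
 * CP15_MIDR(rr) as "p15, 0, rr, c0, c0, 0", the invocation
 *
 *	_RF0(cp15_midr_get, CP15_MIDR(%0))
 *
 * expands to approximately:
 *
 *	static __inline register_t
 *	cp15_midr_get(void)
 *	{
 *		register_t reg;
 *		__asm __volatile("mrc\tp15, 0, %0, c0, c0, 0" : "=r" (reg));
 *		return (reg);
 *	}
 */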

/*
 * Raw CP15 maintenance operations
 * !!! not for external use !!!
 */

/* TLB */

_WF0(_CP15_TLBIALL, CP15_TLBIALL)		/* Invalidate entire unified TLB */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_TLBIALLIS, CP15_TLBIALLIS)		/* Invalidate entire unified TLB IS */
#endif
_WF1(_CP15_TLBIASID, CP15_TLBIASID(%0))		/* Invalidate unified TLB by ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIASIDIS, CP15_TLBIASIDIS(%0))	/* Invalidate unified TLB by ASID IS */
#endif
_WF1(_CP15_TLBIMVAA, CP15_TLBIMVAA(%0))		/* Invalidate unified TLB by MVA, all ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIMVAAIS, CP15_TLBIMVAAIS(%0))	/* Invalidate unified TLB by MVA, all ASID IS */
#endif
_WF1(_CP15_TLBIMVA, CP15_TLBIMVA(%0))		/* Invalidate unified TLB by MVA */
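
/*
 * Note (per the ARMv7 architecture manual): TLBIMVA takes the MVA in
 * bits [31:12] and the ASID in bits [7:0] of its operand, which is why
 * the tlb_flush_*() helpers below pass "(va & ~PAGE_MASK) | asid".
 */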

_WF1(_CP15_TTB_SET, CP15_TTBR0(%0))

/* Cache and Branch predictor */

_WF0(_CP15_BPIALL, CP15_BPIALL)			/* Branch predictor invalidate all */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_BPIALLIS, CP15_BPIALLIS)		/* Branch predictor invalidate all IS */
#endif
_WF1(_CP15_BPIMVA, CP15_BPIMVA(%0))		/* Branch predictor invalidate by MVA */
_WF1(_CP15_DCCIMVAC, CP15_DCCIMVAC(%0))		/* Data cache clean and invalidate by MVA PoC */
_WF1(_CP15_DCCISW, CP15_DCCISW(%0))		/* Data cache clean and invalidate by set/way */
_WF1(_CP15_DCCMVAC, CP15_DCCMVAC(%0))		/* Data cache clean by MVA PoC */
#if __ARM_ARCH >= 7
_WF1(_CP15_DCCMVAU, CP15_DCCMVAU(%0))		/* Data cache clean by MVA PoU */
#endif
_WF1(_CP15_DCCSW, CP15_DCCSW(%0))		/* Data cache clean by set/way */
_WF1(_CP15_DCIMVAC, CP15_DCIMVAC(%0))		/* Data cache invalidate by MVA PoC */
_WF1(_CP15_DCISW, CP15_DCISW(%0))		/* Data cache invalidate by set/way */
_WF0(_CP15_ICIALLU, CP15_ICIALLU)		/* Instruction cache invalidate all PoU */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_ICIALLUIS, CP15_ICIALLUIS)		/* Instruction cache invalidate all PoU IS */
#endif
_WF1(_CP15_ICIMVAU, CP15_ICIMVAU(%0))		/* Instruction cache invalidate by MVA PoU */

/*
 * Publicly accessible functions
 */

/* Various control registers */

_RF0(cp15_dfsr_get, CP15_DFSR(%0))
_RF0(cp15_ifsr_get, CP15_IFSR(%0))
_WF1(cp15_prrr_set, CP15_PRRR(%0))
_WF1(cp15_nmrr_set, CP15_NMRR(%0))
_RF0(cp15_ttbr_get, CP15_TTBR0(%0))
_RF0(cp15_dfar_get, CP15_DFAR(%0))
#if __ARM_ARCH >= 7
_RF0(cp15_ifar_get, CP15_IFAR(%0))
#endif

/* CPU ID registers */
_RF0(cp15_midr_get, CP15_MIDR(%0))
_RF0(cp15_ctr_get, CP15_CTR(%0))
_RF0(cp15_tcmtr_get, CP15_TCMTR(%0))
_RF0(cp15_tlbtr_get, CP15_TLBTR(%0))
_RF0(cp15_mpidr_get, CP15_MPIDR(%0))
_RF0(cp15_revidr_get, CP15_REVIDR(%0))
_RF0(cp15_aidr_get, CP15_AIDR(%0))
_RF0(cp15_id_pfr0_get, CP15_ID_PFR0(%0))
_RF0(cp15_id_pfr1_get, CP15_ID_PFR1(%0))
_RF0(cp15_id_dfr0_get, CP15_ID_DFR0(%0))
_RF0(cp15_id_afr0_get, CP15_ID_AFR0(%0))
_RF0(cp15_id_mmfr0_get, CP15_ID_MMFR0(%0))
_RF0(cp15_id_mmfr1_get, CP15_ID_MMFR1(%0))
_RF0(cp15_id_mmfr2_get, CP15_ID_MMFR2(%0))
_RF0(cp15_id_mmfr3_get, CP15_ID_MMFR3(%0))
_RF0(cp15_id_isar0_get, CP15_ID_ISAR0(%0))
_RF0(cp15_id_isar1_get, CP15_ID_ISAR1(%0))
_RF0(cp15_id_isar2_get, CP15_ID_ISAR2(%0))
_RF0(cp15_id_isar3_get, CP15_ID_ISAR3(%0))
_RF0(cp15_id_isar4_get, CP15_ID_ISAR4(%0))
_RF0(cp15_id_isar5_get, CP15_ID_ISAR5(%0))
_RF0(cp15_cbar_get, CP15_CBAR(%0))
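
/*
 * Usage sketch (hypothetical caller): the implementer code is kept in
 * MIDR[31:24], e.g. 0x41 ('A') for ARM Ltd.:
 *
 *	uint32_t midr = cp15_midr_get();
 *	if (((midr >> 24) & 0xff) == 0x41)
 *		... CPU was designed by ARM Ltd. ...
 */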

/* Performance Monitor registers */

#if __ARM_ARCH == 6 && defined(CPU_ARM1176)
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
#elif __ARM_ARCH > 6
_RF0(cp15_pmcr_get, CP15_PMCR(%0))
_WF1(cp15_pmcr_set, CP15_PMCR(%0))
_RF0(cp15_pmcnten_get, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_set, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_clr, CP15_PMCNTENCLR(%0))
_RF0(cp15_pmovsr_get, CP15_PMOVSR(%0))
_WF1(cp15_pmovsr_set, CP15_PMOVSR(%0))
_WF1(cp15_pmswinc_set, CP15_PMSWINC(%0))
_RF0(cp15_pmselr_get, CP15_PMSELR(%0))
_WF1(cp15_pmselr_set, CP15_PMSELR(%0))
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
_RF0(cp15_pmxevtyper_get, CP15_PMXEVTYPER(%0))
_WF1(cp15_pmxevtyper_set, CP15_PMXEVTYPER(%0))
_RF0(cp15_pmxevcntr_get, CP15_PMXEVCNTRR(%0))
_WF1(cp15_pmxevcntr_set, CP15_PMXEVCNTRR(%0))
_RF0(cp15_pmuserenr_get, CP15_PMUSERENR(%0))
_WF1(cp15_pmuserenr_set, CP15_PMUSERENR(%0))
_RF0(cp15_pminten_get, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_set, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_clr, CP15_PMINTENCLR(%0))
#endif

#undef	_FX
#undef	_RF0
#undef	_WF0
#undef	_WF1

/*
 * TLB maintenance operations.
 */

/* Local (i.e. not broadcasting) operations. */

/* Flush all TLB entries (even global). */
static __inline void
tlb_flush_all_local(void)
{

	dsb();
	_CP15_TLBIALL();
	dsb();
}

/* Flush all non-global TLB entries. */
static __inline void
tlb_flush_all_ng_local(void)
{

	dsb();
	_CP15_TLBIASID(CPU_ASID_KERNEL);
	dsb();
}

/* Flush single TLB entry (even global). */
static __inline void
tlb_flush_local(vm_offset_t sva)
{

	dsb();
	_CP15_TLBIMVA((sva & ~PAGE_MASK) | CPU_ASID_KERNEL);
	dsb();
}

/* Flush range of TLB entries (even global). */
static __inline void
tlb_flush_range_local(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVA((va & ~PAGE_MASK) | CPU_ASID_KERNEL);
	dsb();
}

/* Broadcasting operations. */
#if __ARM_ARCH >= 7 && defined SMP

static __inline void
tlb_flush_all(void)
{

	dsb();
	_CP15_TLBIALLIS();
	dsb();
}

static __inline void
tlb_flush_all_ng(void)
{

	dsb();
	_CP15_TLBIASIDIS(CPU_ASID_KERNEL);
	dsb();
}

static __inline void
tlb_flush(vm_offset_t sva)
{

	dsb();
	_CP15_TLBIMVAAIS(sva);
	dsb();
}

static __inline void
tlb_flush_range(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVAAIS(va);
	dsb();
}
#else /* SMP */

#define tlb_flush_all()			tlb_flush_all_local()
#define tlb_flush_all_ng()		tlb_flush_all_ng_local()
#define tlb_flush(sva)			tlb_flush_local(sva)
#define tlb_flush_range(sva, size)	tlb_flush_range_local(sva, size)

#endif /* SMP */
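
/*
 * Usage sketch (hypothetical caller): after a kernel page-table entry
 * is changed, the stale translation must be removed before the new
 * mapping can be relied on, e.g.
 *
 *	*ptep = new_pte;
 *	tlb_flush(va);
 */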

/*
 * Cache maintenance operations.
 */

/* Sync I and D caches to PoU */
static __inline void
icache_sync(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += arm_dcache_align) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}
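
/*
 * Usage sketch (hypothetical caller): after storing instructions into
 * memory, e.g. when installing a breakpoint or copying a trampoline,
 * the new code must be made visible to instruction fetch:
 *
 *	memcpy((void *)va, insns, size);
 *	icache_sync(va, size);
 */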

/* Invalidate I cache */
static __inline void
icache_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}

/* Write back D-cache to PoU */
static __inline void
dcache_wb_pou(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += arm_dcache_align) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
}

/* Invalidate D-cache to PoC */
static __inline void
dcache_inv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	/* invalidate L1 first */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCIMVAC(va);
	}
	dsb();

	/* then L2 */
	cpu_l2cache_inv_range(pa, size);
	dsb();

	/*
	 * then L1 again, in case stale data migrated back from L2
	 * (e.g. via speculative loads) while L2 was being invalidated
	 */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}
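
/*
 * Usage sketch (hypothetical driver): after a device has written a
 * buffer by DMA, cached lines covering it must be discarded so the
 * CPU reads the DMA'd data rather than stale cache contents:
 *
 *	... device-to-memory DMA completes ...
 *	dcache_inv_poc(buf_va, buf_pa, buf_size);
 *	... now safe to read the buffer ...
 */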

/* Write back D-cache to PoC */
static __inline void
dcache_wb_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	cpu_l2cache_wb_range(pa, size);
}
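
/*
 * Usage sketch (hypothetical driver): before a memory-to-device DMA is
 * started, dirty lines must be written back so the device observes the
 * CPU's stores:
 *
 *	dcache_wb_poc(buf_va, buf_pa, buf_size);
 *	... start memory-to-device DMA ...
 */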

/* Write back and invalidate D-cache to PoC */
static __inline void
dcache_wbinv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	/* write back L1 first */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	/* then write back and invalidate L2 */
	cpu_l2cache_wbinv_range(pa, size);

	/* then invalidate L1 */
	for (va = sva; va < eva; va += arm_dcache_align) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}

/* Set TTB0 register */
static __inline void
cp15_ttbr_set(uint32_t reg)
{
	dsb();
	_CP15_TTB_SET(reg);
	dsb();
	_CP15_BPIALL();
	dsb();
	isb();
	tlb_flush_all_ng_local();
}
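
/*
 * Usage note (sketch): this is typically called when switching address
 * spaces; the barriers plus the branch-predictor and non-global TLB
 * invalidation above keep translations and predictions belonging to
 * the old TTBR0 value from being used after the change.
 */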

#endif /* !MACHINE_CPU_V6_H */