cpu-v6.h (279811)
/*-
 * Copyright 2014 Svatopluk Kraus <onwahe@gmail.com>
 * Copyright 2014 Michal Meloun <meloun@miracle.cz>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm/include/cpu-v6.h 279811 2015-03-09 14:46:10Z ian $
 */
#ifndef MACHINE_CPU_V6_H
#define MACHINE_CPU_V6_H

#include "machine/atomic.h"
#include "machine/cpufunc.h"
#include "machine/cpuinfo.h"
#include "machine/sysreg.h"

#define CPU_ASID_KERNEL 0

vm_offset_t dcache_wb_pou_checked(vm_offset_t, vm_size_t);
vm_offset_t icache_inv_pou_checked(vm_offset_t, vm_size_t);

/*
 * Macros to generate CP15 (system control processor) read/write functions.
 */
#define _FX(s...) #s

#define _RF0(fname, aname...)					\
static __inline register_t					\
fname(void)							\
{								\
	register_t reg;						\
	__asm __volatile("mrc\t" _FX(aname): "=r" (reg));	\
	return(reg);						\
}

#define _WF0(fname, aname...)					\
static __inline void						\
fname(void)							\
{								\
	__asm __volatile("mcr\t" _FX(aname));			\
}

#define _WF1(fname, aname...)					\
static __inline void						\
fname(register_t reg)						\
{								\
	__asm __volatile("mcr\t" _FX(aname):: "r" (reg));	\
}

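/*
 * Illustrative expansion (a sketch, kept out of the build with #if 0):
 * assuming machine/sysreg.h encodes CP15_MIDR(rr) as "p15, 0, rr, c0,
 * c0, 0", the later _RF0(cp15_midr_get, CP15_MIDR(%0)) generates
 * roughly the accessor below; _FX() merely stringifies the operand list
 * into the inline-asm template.
 */
#if 0
static __inline register_t
cp15_midr_get(void)
{
	register_t reg;

	__asm __volatile("mrc\tp15, 0, %0, c0, c0, 0" : "=r" (reg));
	return (reg);
}
#endif
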
/*
 * Raw CP15 maintenance operations
 * !!! not for external use !!!
 */

/* TLB */

_WF0(_CP15_TLBIALL, CP15_TLBIALL)	/* Invalidate entire unified TLB */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_TLBIALLIS, CP15_TLBIALLIS)	/* Invalidate entire unified TLB IS */
#endif
_WF1(_CP15_TLBIASID, CP15_TLBIASID(%0))	/* Invalidate unified TLB by ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIASIDIS, CP15_TLBIASIDIS(%0))	/* Invalidate unified TLB by ASID IS */
#endif
_WF1(_CP15_TLBIMVAA, CP15_TLBIMVAA(%0))	/* Invalidate unified TLB by MVA, all ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIMVAAIS, CP15_TLBIMVAAIS(%0))	/* Invalidate unified TLB by MVA, all ASID IS */
#endif
_WF1(_CP15_TLBIMVA, CP15_TLBIMVA(%0))	/* Invalidate unified TLB by MVA */

_WF1(_CP15_TTB_SET, CP15_TTBR0(%0))

/* Cache and Branch predictor */

_WF0(_CP15_BPIALL, CP15_BPIALL)		/* Branch predictor invalidate all */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_BPIALLIS, CP15_BPIALLIS)	/* Branch predictor invalidate all IS */
#endif
_WF1(_CP15_BPIMVA, CP15_BPIMVA(%0))	/* Branch predictor invalidate by MVA */
_WF1(_CP15_DCCIMVAC, CP15_DCCIMVAC(%0))	/* Data cache clean and invalidate by MVA PoC */
_WF1(_CP15_DCCISW, CP15_DCCISW(%0))	/* Data cache clean and invalidate by set/way */
_WF1(_CP15_DCCMVAC, CP15_DCCMVAC(%0))	/* Data cache clean by MVA PoC */
#if __ARM_ARCH >= 7
_WF1(_CP15_DCCMVAU, CP15_DCCMVAU(%0))	/* Data cache clean by MVA PoU */
#endif
_WF1(_CP15_DCCSW, CP15_DCCSW(%0))	/* Data cache clean by set/way */
_WF1(_CP15_DCIMVAC, CP15_DCIMVAC(%0))	/* Data cache invalidate by MVA PoC */
_WF1(_CP15_DCISW, CP15_DCISW(%0))	/* Data cache invalidate by set/way */
_WF0(_CP15_ICIALLU, CP15_ICIALLU)	/* Instruction cache invalidate all PoU */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_ICIALLUIS, CP15_ICIALLUIS)	/* Instruction cache invalidate all PoU IS */
#endif
_WF1(_CP15_ICIMVAU, CP15_ICIMVAU(%0))	/* Instruction cache invalidate by MVA PoU */

/*
 * Publicly accessible functions
 */

/* Various control registers */

_RF0(cp15_dfsr_get, CP15_DFSR(%0))
_RF0(cp15_ifsr_get, CP15_IFSR(%0))
_WF1(cp15_prrr_set, CP15_PRRR(%0))
_WF1(cp15_nmrr_set, CP15_NMRR(%0))
_RF0(cp15_ttbr_get, CP15_TTBR0(%0))
_RF0(cp15_dfar_get, CP15_DFAR(%0))
#if __ARM_ARCH >= 7
_RF0(cp15_ifar_get, CP15_IFAR(%0))
#endif

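/*
 * Usage sketch (hypothetical data-abort-handler fragment, kept out of
 * the build with #if 0): the fault status and fault address of a data
 * abort are read through the DFSR/DFAR accessors above.
 */
#if 0
static void
data_abort_report(void)
{
	register_t fsr = cp15_dfsr_get();
	register_t far = cp15_dfar_get();

	printf("data abort: fsr=%#x far=%#x\n", (u_int)fsr, (u_int)far);
}
#endif
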
/* CPU ID registers */
_RF0(cp15_midr_get, CP15_MIDR(%0))
_RF0(cp15_ctr_get, CP15_CTR(%0))
_RF0(cp15_tcmtr_get, CP15_TCMTR(%0))
_RF0(cp15_tlbtr_get, CP15_TLBTR(%0))
_RF0(cp15_mpidr_get, CP15_MPIDR(%0))
_RF0(cp15_revidr_get, CP15_REVIDR(%0))
_RF0(cp15_aidr_get, CP15_AIDR(%0))
_RF0(cp15_id_pfr0_get, CP15_ID_PFR0(%0))
_RF0(cp15_id_pfr1_get, CP15_ID_PFR1(%0))
_RF0(cp15_id_dfr0_get, CP15_ID_DFR0(%0))
_RF0(cp15_id_afr0_get, CP15_ID_AFR0(%0))
_RF0(cp15_id_mmfr0_get, CP15_ID_MMFR0(%0))
_RF0(cp15_id_mmfr1_get, CP15_ID_MMFR1(%0))
_RF0(cp15_id_mmfr2_get, CP15_ID_MMFR2(%0))
_RF0(cp15_id_mmfr3_get, CP15_ID_MMFR3(%0))
_RF0(cp15_id_isar0_get, CP15_ID_ISAR0(%0))
_RF0(cp15_id_isar1_get, CP15_ID_ISAR1(%0))
_RF0(cp15_id_isar2_get, CP15_ID_ISAR2(%0))
_RF0(cp15_id_isar3_get, CP15_ID_ISAR3(%0))
_RF0(cp15_id_isar4_get, CP15_ID_ISAR4(%0))
_RF0(cp15_id_isar5_get, CP15_ID_ISAR5(%0))
_RF0(cp15_cbar_get, CP15_CBAR(%0))

/* Performance Monitor registers */

#if __ARM_ARCH == 6 && defined(CPU_ARM1176)
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
#elif __ARM_ARCH > 6
_RF0(cp15_pmcr_get, CP15_PMCR(%0))
_WF1(cp15_pmcr_set, CP15_PMCR(%0))
_RF0(cp15_pmcnten_get, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_set, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_clr, CP15_PMCNTENCLR(%0))
_RF0(cp15_pmovsr_get, CP15_PMOVSR(%0))
_WF1(cp15_pmovsr_set, CP15_PMOVSR(%0))
_WF1(cp15_pmswinc_set, CP15_PMSWINC(%0))
_RF0(cp15_pmselr_get, CP15_PMSELR(%0))
_WF1(cp15_pmselr_set, CP15_PMSELR(%0))
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
_RF0(cp15_pmxevtyper_get, CP15_PMXEVTYPER(%0))
_WF1(cp15_pmxevtyper_set, CP15_PMXEVTYPER(%0))
_RF0(cp15_pmxevcntr_get, CP15_PMXEVCNTRR(%0))
_WF1(cp15_pmxevcntr_set, CP15_PMXEVCNTRR(%0))
_RF0(cp15_pmuserenr_get, CP15_PMUSERENR(%0))
_WF1(cp15_pmuserenr_set, CP15_PMUSERENR(%0))
_RF0(cp15_pminten_get, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_set, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_clr, CP15_PMINTENCLR(%0))
#endif

_RF0(cp15_tpidrurw_get, CP15_TPIDRURW(%0))
_WF1(cp15_tpidrurw_set, CP15_TPIDRURW(%0))
_RF0(cp15_tpidruro_get, CP15_TPIDRURO(%0))
_WF1(cp15_tpidruro_set, CP15_TPIDRURO(%0))
_RF0(cp15_tpidrpwr_get, CP15_TPIDRPRW(%0))
_WF1(cp15_tpidrpwr_set, CP15_TPIDRPRW(%0))

#undef _FX
#undef _RF0
#undef _WF0
#undef _WF1

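/*
 * Usage sketch (hypothetical helper, ARMv7 only, kept out of the build
 * with #if 0): enable and read the cycle counter through the accessors
 * above. Bit positions follow the ARM ARM: PMCR.E (enable) is bit 0,
 * PMCR.C (cycle counter reset) is bit 2, and PMCNTENSET bit 31 gates
 * PMCCNTR.
 */
#if 0
static __inline uint32_t
pmu_read_cycles(void)
{

	cp15_pmcr_set(cp15_pmcr_get() | 0x05);	/* PMCR.E | PMCR.C */
	cp15_pmcnten_set(1U << 31);		/* enable PMCCNTR */
	return (cp15_pmccntr_get());
}
#endif
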
/*
 * TLB maintenance operations.
 */

/* Local (i.e. not broadcasting) operations. */

/* Flush all TLB entries (even global). */
static __inline void
tlb_flush_all_local(void)
{

	dsb();
	_CP15_TLBIALL();
	dsb();
}

/* Flush all non-global TLB entries. */
static __inline void
tlb_flush_all_ng_local(void)
{

	dsb();
	_CP15_TLBIASID(CPU_ASID_KERNEL);
	dsb();
}

/* Flush a single TLB entry (even global). */
static __inline void
tlb_flush_local(vm_offset_t sva)
{

	dsb();
	_CP15_TLBIMVA((sva & ~PAGE_MASK) | CPU_ASID_KERNEL);
	dsb();
}

/* Flush a range of TLB entries (even global). */
static __inline void
tlb_flush_range_local(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVA((va & ~PAGE_MASK) | CPU_ASID_KERNEL);
	dsb();
}

/* Broadcasting operations. */
#if __ARM_ARCH >= 7 && defined SMP

static __inline void
tlb_flush_all(void)
{

	dsb();
	_CP15_TLBIALLIS();
	dsb();
}

static __inline void
tlb_flush_all_ng(void)
{

	dsb();
	_CP15_TLBIASIDIS(CPU_ASID_KERNEL);
	dsb();
}

static __inline void
tlb_flush(vm_offset_t sva)
{

	dsb();
	_CP15_TLBIMVAAIS(sva);
	dsb();
}

static __inline void
tlb_flush_range(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVAAIS(va);
	dsb();
}
#else /* SMP */

#define tlb_flush_all()			tlb_flush_all_local()
#define tlb_flush_all_ng()		tlb_flush_all_ng_local()
#define tlb_flush(sva)			tlb_flush_local(sva)
#define tlb_flush_range(sva, size)	tlb_flush_range_local(sva, size)

#endif /* SMP */

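/*
 * Usage sketch (hypothetical pmap-style caller, kept out of the build
 * with #if 0): after rewriting kernel PTEs, the stale translations must
 * be flushed. On ARMv7 SMP kernels tlb_flush_range() broadcasts to the
 * inner shareable domain; otherwise it falls back to the local variant.
 */
#if 0
static void
kernel_remap_example(vm_offset_t va, vm_size_t size)
{

	/* ... update the page table entries covering [va, va + size) ... */
	tlb_flush_range(va, size);
}
#endif
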
/*
 * Cache maintenance operations.
 */

/* Sync I and D caches to PoU */
static __inline void
icache_sync(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}

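/*
 * Usage sketch (hypothetical caller, kept out of the build with #if 0):
 * any code the CPU writes and then executes (breakpoints, trampolines,
 * loaded modules) must reach the point of unification before the first
 * fetch, which is exactly what icache_sync() provides.
 */
#if 0
static void
patch_insn_example(uint32_t *insnp, uint32_t newinsn)
{

	*insnp = newinsn;
	icache_sync((vm_offset_t)insnp, sizeof(*insnp));
}
#endif
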
/* Invalidate I cache */
static __inline void
icache_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}

/* Invalidate branch predictor buffer */
static __inline void
bpb_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_BPIALLIS();
#else
	_CP15_BPIALL();
#endif
	dsb();
	isb();
}

/* Write back D-cache to PoU */
static __inline void
dcache_wb_pou(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
}

/* Invalidate D-cache to PoC */
static __inline void
dcache_inv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	/* invalidate L1 first */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);
	}
	dsb();

	/* then L2 */
	cpu_l2cache_inv_range(pa, size);
	dsb();

	/* then L1 again */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}

/* Write back D-cache to PoC */
static __inline void
dcache_wb_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	cpu_l2cache_wb_range(pa, size);
}

/* Write back and invalidate D-cache to PoC */
static __inline void
dcache_wbinv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	/* write back L1 first */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	/* then write back and invalidate L2 */
	cpu_l2cache_wbinv_range(pa, size);

	/* then invalidate L1 */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}

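/*
 * Usage sketch (hypothetical busdma-style caller, kept out of the build
 * with #if 0): with a non-coherent device, write the buffer back to PoC
 * before the device reads it and invalidate to PoC after the device has
 * written it; vtophys() stands in for however the caller resolves the
 * physical address.
 */
#if 0
static void
dma_sync_example(vm_offset_t va, vm_size_t size, int device_wrote)
{

	if (device_wrote)
		dcache_inv_poc(va, vtophys(va), size);	/* after device DMA in */
	else
		dcache_wb_poc(va, vtophys(va), size);	/* before device DMA out */
}
#endif
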
/* Set TTB0 register */
static __inline void
cp15_ttbr_set(uint32_t reg)
{
	dsb();
	_CP15_TTB_SET(reg);
	dsb();
	_CP15_BPIALL();
	dsb();
	isb();
	tlb_flush_all_ng_local();
}

#endif /* !MACHINE_CPU_V6_H */