cpu-v6.h revision 283336
/*-
 * Copyright 2014 Svatopluk Kraus <onwahe@gmail.com>
 * Copyright 2014 Michal Meloun <meloun@miracle.cz>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: stable/10/sys/arm/include/cpu-v6.h 283336 2015-05-23 23:05:31Z ian $
 */
/*
 * Inline accessors for the ARMv6/v7 CP15 (system control) coprocessor, plus
 * TLB and cache maintenance helpers built on top of them.  The accessors are
 * generated by the _RF0/_WF0/_WF1 macros below; the maintenance helpers wrap
 * the raw operations with the dsb()/isb() barriers the callers rely on.
 */
#ifndef MACHINE_CPU_V6_H
#define MACHINE_CPU_V6_H

#include "machine/atomic.h"
#include "machine/cpufunc.h"
#include "machine/cpuinfo.h"
#include "machine/sysreg.h"

/* ASID 0 is reserved for kernel (global) mappings throughout this file. */
#define CPU_ASID_KERNEL 0

vm_offset_t dcache_wb_pou_checked(vm_offset_t, vm_size_t);
vm_offset_t icache_inv_pou_checked(vm_offset_t, vm_size_t);

/*
 * Macros to generate CP15 (system control processor) read/write functions.
 *
 * _FX stringifies a CP15_* operand list from machine/sysreg.h (e.g.
 * "p15, 0, %0, c7, c5, 0") so it can be pasted after the mrc/mcr mnemonic.
 * _RF0 emits a read accessor (mrc), _WF0 a no-operand write (mcr), and
 * _WF1 a one-operand write (mcr with a register argument).
 */
#define _FX(s...) #s

#define _RF0(fname, aname...)						\
static __inline register_t						\
fname(void)								\
{									\
	register_t reg;							\
	__asm __volatile("mrc\t" _FX(aname): "=r" (reg));		\
	return(reg);							\
}

#define _WF0(fname, aname...)						\
static __inline void							\
fname(void)								\
{									\
	__asm __volatile("mcr\t" _FX(aname));				\
}

#define _WF1(fname, aname...)						\
static __inline void							\
fname(register_t reg)							\
{									\
	__asm __volatile("mcr\t" _FX(aname):: "r" (reg));		\
}

/*
 * Raw CP15 maintenance operations
 * !!! not for external use !!!
 *
 * The "IS" (Inner Shareable) variants broadcast the operation to other CPUs
 * and exist only on ARMv7 SMP kernels; on other configurations callers fall
 * back to the purely local forms.
 */

/* TLB */

_WF0(_CP15_TLBIALL, CP15_TLBIALL)		/* Invalidate entire unified TLB */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_TLBIALLIS, CP15_TLBIALLIS)		/* Invalidate entire unified TLB IS */
#endif
_WF1(_CP15_TLBIASID, CP15_TLBIASID(%0))		/* Invalidate unified TLB by ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIASIDIS, CP15_TLBIASIDIS(%0))	/* Invalidate unified TLB by ASID IS */
#endif
_WF1(_CP15_TLBIMVAA, CP15_TLBIMVAA(%0))		/* Invalidate unified TLB by MVA, all ASID */
#if __ARM_ARCH >= 7 && defined SMP
_WF1(_CP15_TLBIMVAAIS, CP15_TLBIMVAAIS(%0))	/* Invalidate unified TLB by MVA, all ASID IS */
#endif
_WF1(_CP15_TLBIMVA, CP15_TLBIMVA(%0))		/* Invalidate unified TLB by MVA */

_WF1(_CP15_TTB_SET, CP15_TTBR0(%0))		/* Write Translation Table Base Register 0 */

/* Cache and Branch predictor */

_WF0(_CP15_BPIALL, CP15_BPIALL)			/* Branch predictor invalidate all */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_BPIALLIS, CP15_BPIALLIS)		/* Branch predictor invalidate all IS */
#endif
_WF1(_CP15_BPIMVA, CP15_BPIMVA(%0))		/* Branch predictor invalidate by MVA */
_WF1(_CP15_DCCIMVAC, CP15_DCCIMVAC(%0))		/* Data cache clean and invalidate by MVA PoC */
_WF1(_CP15_DCCISW, CP15_DCCISW(%0))		/* Data cache clean and invalidate by set/way */
_WF1(_CP15_DCCMVAC, CP15_DCCMVAC(%0))		/* Data cache clean by MVA PoC */
#if __ARM_ARCH >= 7
_WF1(_CP15_DCCMVAU, CP15_DCCMVAU(%0))		/* Data cache clean by MVA PoU */
#endif
_WF1(_CP15_DCCSW, CP15_DCCSW(%0))		/* Data cache clean by set/way */
_WF1(_CP15_DCIMVAC, CP15_DCIMVAC(%0))		/* Data cache invalidate by MVA PoC */
_WF1(_CP15_DCISW, CP15_DCISW(%0))		/* Data cache invalidate by set/way */
_WF0(_CP15_ICIALLU, CP15_ICIALLU)		/* Instruction cache invalidate all PoU */
#if __ARM_ARCH >= 7 && defined SMP
_WF0(_CP15_ICIALLUIS, CP15_ICIALLUIS)		/* Instruction cache invalidate all PoU IS */
#endif
_WF1(_CP15_ICIMVAU, CP15_ICIMVAU(%0))		/* Instruction cache invalidate */

/*
 * Publicly accessible functions
 */

/* Various control registers */

_RF0(cp15_dfsr_get, CP15_DFSR(%0))		/* Data Fault Status Register */
_RF0(cp15_ifsr_get, CP15_IFSR(%0))		/* Instruction Fault Status Register */
_WF1(cp15_prrr_set, CP15_PRRR(%0))		/* Primary Region Remap Register */
_WF1(cp15_nmrr_set, CP15_NMRR(%0))		/* Normal Memory Remap Register */
_RF0(cp15_ttbr_get, CP15_TTBR0(%0))		/* Translation Table Base Register 0 */
_RF0(cp15_dfar_get, CP15_DFAR(%0))		/* Data Fault Address Register */
#if __ARM_ARCH >= 7
_RF0(cp15_ifar_get, CP15_IFAR(%0))		/* Instruction Fault Address Register (v7 only) */
#endif

/* CPU id registers */
_RF0(cp15_midr_get, CP15_MIDR(%0))
_RF0(cp15_ctr_get, CP15_CTR(%0))
_RF0(cp15_tcmtr_get, CP15_TCMTR(%0))
_RF0(cp15_tlbtr_get, CP15_TLBTR(%0))
_RF0(cp15_mpidr_get, CP15_MPIDR(%0))
_RF0(cp15_revidr_get, CP15_REVIDR(%0))
_RF0(cp15_aidr_get, CP15_AIDR(%0))
_RF0(cp15_id_pfr0_get, CP15_ID_PFR0(%0))
_RF0(cp15_id_pfr1_get, CP15_ID_PFR1(%0))
_RF0(cp15_id_dfr0_get, CP15_ID_DFR0(%0))
_RF0(cp15_id_afr0_get, CP15_ID_AFR0(%0))
_RF0(cp15_id_mmfr0_get, CP15_ID_MMFR0(%0))
_RF0(cp15_id_mmfr1_get, CP15_ID_MMFR1(%0))
_RF0(cp15_id_mmfr2_get, CP15_ID_MMFR2(%0))
_RF0(cp15_id_mmfr3_get, CP15_ID_MMFR3(%0))
_RF0(cp15_id_isar0_get, CP15_ID_ISAR0(%0))
_RF0(cp15_id_isar1_get, CP15_ID_ISAR1(%0))
_RF0(cp15_id_isar2_get, CP15_ID_ISAR2(%0))
_RF0(cp15_id_isar3_get, CP15_ID_ISAR3(%0))
_RF0(cp15_id_isar4_get, CP15_ID_ISAR4(%0))
_RF0(cp15_id_isar5_get, CP15_ID_ISAR5(%0))
_RF0(cp15_cbar_get, CP15_CBAR(%0))

/* Performance Monitor registers */

#if __ARM_ARCH == 6 && defined(CPU_ARM1176)
/* ARM1176 exposes only the cycle counter. */
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
#elif __ARM_ARCH > 6
_RF0(cp15_pmcr_get, CP15_PMCR(%0))
_WF1(cp15_pmcr_set, CP15_PMCR(%0))
_RF0(cp15_pmcnten_get, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_set, CP15_PMCNTENSET(%0))
_WF1(cp15_pmcnten_clr, CP15_PMCNTENCLR(%0))
_RF0(cp15_pmovsr_get, CP15_PMOVSR(%0))
_WF1(cp15_pmovsr_set, CP15_PMOVSR(%0))
_WF1(cp15_pmswinc_set, CP15_PMSWINC(%0))
_RF0(cp15_pmselr_get, CP15_PMSELR(%0))
_WF1(cp15_pmselr_set, CP15_PMSELR(%0))
_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0))
_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0))
_RF0(cp15_pmxevtyper_get, CP15_PMXEVTYPER(%0))
_WF1(cp15_pmxevtyper_set, CP15_PMXEVTYPER(%0))
/* NOTE(review): "PMXEVCNTRR" spelling follows sysreg.h's macro name. */
_RF0(cp15_pmxevcntr_get, CP15_PMXEVCNTRR(%0))
_WF1(cp15_pmxevcntr_set, CP15_PMXEVCNTRR(%0))
_RF0(cp15_pmuserenr_get, CP15_PMUSERENR(%0))
_WF1(cp15_pmuserenr_set, CP15_PMUSERENR(%0))
_RF0(cp15_pminten_get, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_set, CP15_PMINTENSET(%0))
_WF1(cp15_pminten_clr, CP15_PMINTENCLR(%0))
#endif

#undef _FX
#undef _RF0
#undef _WF0
#undef _WF1

/*
 * TLB maintenance operations.
 *
 * Each helper brackets the CP15 operation with dsb(): the leading barrier
 * presumably orders prior page-table stores before the invalidate, and the
 * trailing one waits for the operation to complete -- standard ARM practice;
 * confirm against the architecture manual before relying on weaker ordering.
 */

/* Local (i.e. not broadcasting ) operations. */

/* Flush all TLB entries (even global). */
static __inline void
tlb_flush_all_local(void)
{

	dsb();
	_CP15_TLBIALL();
	dsb();
}

/* Flush all not global TLB entries. */
static __inline void
tlb_flush_all_ng_local(void)
{

	dsb();
	_CP15_TLBIASID(CPU_ASID_KERNEL);
	dsb();
}

/* Flush single TLB entry (even global). */
static __inline void
tlb_flush_local(vm_offset_t sva)
{

	dsb();
	/* MVA in the high bits, ASID in the low bits of the operand. */
	_CP15_TLBIMVA((sva & ~PAGE_MASK ) | CPU_ASID_KERNEL);
	dsb();
}

/* Flush range of TLB entries (even global). */
static __inline void
tlb_flush_range_local(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVA((va & ~PAGE_MASK ) | CPU_ASID_KERNEL);
	dsb();
}

/* Broadcasting operations. */
#if __ARM_ARCH >= 7 && defined SMP

static __inline void
tlb_flush_all(void)
{

	dsb();
	_CP15_TLBIALLIS();
	dsb();
}

static __inline void
tlb_flush_all_ng(void)
{

	dsb();
	_CP15_TLBIASIDIS(CPU_ASID_KERNEL);
	dsb();
}

static __inline void
tlb_flush(vm_offset_t sva)
{

	dsb();
	_CP15_TLBIMVAAIS(sva);
	dsb();
}

static __inline void
tlb_flush_range(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += PAGE_SIZE)
		_CP15_TLBIMVAAIS(va);
	dsb();
}
#else /* SMP */

/* On UP (or pre-v7) kernels the broadcast names alias the local forms. */
#define tlb_flush_all() 		tlb_flush_all_local()
#define tlb_flush_all_ng() 		tlb_flush_all_ng_local()
#define tlb_flush(sva) 			tlb_flush_local(sva)
#define tlb_flush_range(sva, size) 	tlb_flush_range_local(sva, size)

#endif /* SMP */

/*
 * Cache maintenance operations.
 *
 * PoU = Point of Unification (I-cache, D-cache and TLB walks see the same
 * copy); PoC = Point of Coherency (all agents in the system see the same
 * copy).  Loops step by cpuinfo.dcache_line_size, so callers need not align
 * sva/size to cache lines themselves.
 */

/* Sync I and D caches to PoU */
static __inline void
icache_sync(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}

/* Invalidate I cache */
static __inline void
icache_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}

/* Invalidate branch predictor buffer */
static __inline void
bpb_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_BPIALLIS();
#else
	_CP15_BPIALL();
#endif
	dsb();
	isb();
}

/* Write back D-cache to PoU */
static __inline void
dcache_wb_pou(vm_offset_t sva, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);
#else
		_CP15_DCCMVAC(va);
#endif
	}
	dsb();
}

/*
 * Invalidate D-cache to PoC
 *
 * Caller supplies both the kernel VA (for the L1 CP15 ops) and the PA (for
 * the outer L2 controller).  L1 is invalidated again after L2 in case lines
 * migrated back in while the L2 operation was in progress.
 */
static __inline void
dcache_inv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	/* invalidate L1 first */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);
	}
	dsb();

	/* then L2 */
	cpu_l2cache_inv_range(pa, size);
	dsb();

	/* then L1 again */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}

/* Write back D-cache to PoC */
static __inline void
dcache_wb_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	cpu_l2cache_wb_range(pa, size);
}

/* Write back and invalidate D-cache to PoC */
static __inline void
dcache_wbinv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();

	/* write back L1 first */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	/* then write back and invalidate L2 */
	cpu_l2cache_wbinv_range(pa, size);

	/* then invalidate L1 */
	for (va = sva; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}

/*
 * Set TTB0 register
 *
 * After installing the new table base, the branch predictor is invalidated
 * and all non-global TLB entries are flushed so no stale translations from
 * the previous address space survive.
 */
static __inline void
cp15_ttbr_set(uint32_t reg)
{
	dsb();
	_CP15_TTB_SET(reg);
	dsb();
	_CP15_BPIALL();
	dsb();
	isb();
	tlb_flush_all_ng_local();
}

#endif /* !MACHINE_CPU_V6_H */