cpu-v6.h revision 289759
1/*- 2 * Copyright 2014 Svatopluk Kraus <onwahe@gmail.com> 3 * Copyright 2014 Michal Meloun <meloun@miracle.cz> 4 * All rights reserved. 5 * 6 * Redistribution and use in source and binary forms, with or without 7 * modification, are permitted provided that the following conditions 8 * are met: 9 * 1. Redistributions of source code must retain the above copyright 10 * notice, this list of conditions and the following disclaimer. 11 * 2. Redistributions in binary form must reproduce the above copyright 12 * notice, this list of conditions and the following disclaimer in the 13 * documentation and/or other materials provided with the distribution. 14 * 15 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND 16 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 18 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE 19 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 20 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 21 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) 22 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 23 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY 24 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 25 * SUCH DAMAGE. 
26 * 27 * $FreeBSD: head/sys/arm/include/cpu-v6.h 289759 2015-10-22 16:38:01Z jah $ 28 */ 29#ifndef MACHINE_CPU_V6_H 30#define MACHINE_CPU_V6_H 31 32/* There are no user serviceable parts here, they may change without notice */ 33#ifndef _KERNEL 34#error Only include this file in the kernel 35#else 36 37#include "machine/atomic.h" 38#include "machine/cpufunc.h" 39#include "machine/cpuinfo.h" 40#include "machine/sysreg.h" 41 42#define CPU_ASID_KERNEL 0 43 44vm_offset_t dcache_wb_pou_checked(vm_offset_t, vm_size_t); 45vm_offset_t icache_inv_pou_checked(vm_offset_t, vm_size_t); 46 47/* 48 * Macros to generate CP15 (system control processor) read/write functions. 49 */ 50#define _FX(s...) #s 51 52#define _RF0(fname, aname...) \ 53static __inline register_t \ 54fname(void) \ 55{ \ 56 register_t reg; \ 57 __asm __volatile("mrc\t" _FX(aname): "=r" (reg)); \ 58 return(reg); \ 59} 60 61#define _R64F0(fname, aname) \ 62static __inline uint64_t \ 63fname(void) \ 64{ \ 65 uint64_t reg; \ 66 __asm __volatile("mrrc\t" _FX(aname): "=r" (reg)); \ 67 return(reg); \ 68} 69 70#define _WF0(fname, aname...) \ 71static __inline void \ 72fname(void) \ 73{ \ 74 __asm __volatile("mcr\t" _FX(aname)); \ 75} 76 77#define _WF1(fname, aname...) \ 78static __inline void \ 79fname(register_t reg) \ 80{ \ 81 __asm __volatile("mcr\t" _FX(aname):: "r" (reg)); \ 82} 83 84#define _W64F1(fname, aname...) \ 85static __inline void \ 86fname(uint64_t reg) \ 87{ \ 88 __asm __volatile("mcrr\t" _FX(aname):: "r" (reg)); \ 89} 90 91/* 92 * Raw CP15 maintenance operations 93 * !!! not for external use !!! 
94 */ 95 96/* TLB */ 97 98_WF0(_CP15_TLBIALL, CP15_TLBIALL) /* Invalidate entire unified TLB */ 99#if __ARM_ARCH >= 7 && defined SMP 100_WF0(_CP15_TLBIALLIS, CP15_TLBIALLIS) /* Invalidate entire unified TLB IS */ 101#endif 102_WF1(_CP15_TLBIASID, CP15_TLBIASID(%0)) /* Invalidate unified TLB by ASID */ 103#if __ARM_ARCH >= 7 && defined SMP 104_WF1(_CP15_TLBIASIDIS, CP15_TLBIASIDIS(%0)) /* Invalidate unified TLB by ASID IS */ 105#endif 106_WF1(_CP15_TLBIMVAA, CP15_TLBIMVAA(%0)) /* Invalidate unified TLB by MVA, all ASID */ 107#if __ARM_ARCH >= 7 && defined SMP 108_WF1(_CP15_TLBIMVAAIS, CP15_TLBIMVAAIS(%0)) /* Invalidate unified TLB by MVA, all ASID IS */ 109#endif 110_WF1(_CP15_TLBIMVA, CP15_TLBIMVA(%0)) /* Invalidate unified TLB by MVA */ 111 112_WF1(_CP15_TTB_SET, CP15_TTBR0(%0)) 113 114/* Cache and Branch predictor */ 115 116_WF0(_CP15_BPIALL, CP15_BPIALL) /* Branch predictor invalidate all */ 117#if __ARM_ARCH >= 7 && defined SMP 118_WF0(_CP15_BPIALLIS, CP15_BPIALLIS) /* Branch predictor invalidate all IS */ 119#endif 120_WF1(_CP15_BPIMVA, CP15_BPIMVA(%0)) /* Branch predictor invalidate by MVA */ 121_WF1(_CP15_DCCIMVAC, CP15_DCCIMVAC(%0)) /* Data cache clean and invalidate by MVA PoC */ 122_WF1(_CP15_DCCISW, CP15_DCCISW(%0)) /* Data cache clean and invalidate by set/way */ 123_WF1(_CP15_DCCMVAC, CP15_DCCMVAC(%0)) /* Data cache clean by MVA PoC */ 124#if __ARM_ARCH >= 7 125_WF1(_CP15_DCCMVAU, CP15_DCCMVAU(%0)) /* Data cache clean by MVA PoU */ 126#endif 127_WF1(_CP15_DCCSW, CP15_DCCSW(%0)) /* Data cache clean by set/way */ 128_WF1(_CP15_DCIMVAC, CP15_DCIMVAC(%0)) /* Data cache invalidate by MVA PoC */ 129_WF1(_CP15_DCISW, CP15_DCISW(%0)) /* Data cache invalidate by set/way */ 130_WF0(_CP15_ICIALLU, CP15_ICIALLU) /* Instruction cache invalidate all PoU */ 131#if __ARM_ARCH >= 7 && defined SMP 132_WF0(_CP15_ICIALLUIS, CP15_ICIALLUIS) /* Instruction cache invalidate all PoU IS */ 133#endif 134_WF1(_CP15_ICIMVAU, CP15_ICIMVAU(%0)) /* Instruction cache invalidate */ 
135 136/* 137 * Publicly accessible functions 138 */ 139 140/* Various control registers */ 141 142_RF0(cp15_cpacr_get, CP15_CPACR(%0)) 143_WF1(cp15_cpacr_set, CP15_CPACR(%0)) 144_RF0(cp15_dfsr_get, CP15_DFSR(%0)) 145_RF0(cp15_ifsr_get, CP15_IFSR(%0)) 146_WF1(cp15_prrr_set, CP15_PRRR(%0)) 147_WF1(cp15_nmrr_set, CP15_NMRR(%0)) 148_RF0(cp15_ttbr_get, CP15_TTBR0(%0)) 149_RF0(cp15_dfar_get, CP15_DFAR(%0)) 150#if __ARM_ARCH >= 7 151_RF0(cp15_ifar_get, CP15_IFAR(%0)) 152_RF0(cp15_l2ctlr_get, CP15_L2CTLR(%0)) 153#endif 154/* ARMv6+ and XScale */ 155_RF0(cp15_actlr_get, CP15_ACTLR(%0)) 156_WF1(cp15_actlr_set, CP15_ACTLR(%0)) 157#if __ARM_ARCH >= 6 158_WF1(cp15_ats1cpr_set, CP15_ATS1CPR(%0)); 159_RF0(cp15_par_get, CP15_PAR); 160_RF0(cp15_sctlr_get, CP15_SCTLR(%0)) 161#endif 162 163/*CPU id registers */ 164_RF0(cp15_midr_get, CP15_MIDR(%0)) 165_RF0(cp15_ctr_get, CP15_CTR(%0)) 166_RF0(cp15_tcmtr_get, CP15_TCMTR(%0)) 167_RF0(cp15_tlbtr_get, CP15_TLBTR(%0)) 168_RF0(cp15_mpidr_get, CP15_MPIDR(%0)) 169_RF0(cp15_revidr_get, CP15_REVIDR(%0)) 170_RF0(cp15_ccsidr_get, CP15_CCSIDR(%0)) 171_RF0(cp15_clidr_get, CP15_CLIDR(%0)) 172_RF0(cp15_aidr_get, CP15_AIDR(%0)) 173_WF1(cp15_csselr_set, CP15_CSSELR(%0)) 174_RF0(cp15_id_pfr0_get, CP15_ID_PFR0(%0)) 175_RF0(cp15_id_pfr1_get, CP15_ID_PFR1(%0)) 176_RF0(cp15_id_dfr0_get, CP15_ID_DFR0(%0)) 177_RF0(cp15_id_afr0_get, CP15_ID_AFR0(%0)) 178_RF0(cp15_id_mmfr0_get, CP15_ID_MMFR0(%0)) 179_RF0(cp15_id_mmfr1_get, CP15_ID_MMFR1(%0)) 180_RF0(cp15_id_mmfr2_get, CP15_ID_MMFR2(%0)) 181_RF0(cp15_id_mmfr3_get, CP15_ID_MMFR3(%0)) 182_RF0(cp15_id_isar0_get, CP15_ID_ISAR0(%0)) 183_RF0(cp15_id_isar1_get, CP15_ID_ISAR1(%0)) 184_RF0(cp15_id_isar2_get, CP15_ID_ISAR2(%0)) 185_RF0(cp15_id_isar3_get, CP15_ID_ISAR3(%0)) 186_RF0(cp15_id_isar4_get, CP15_ID_ISAR4(%0)) 187_RF0(cp15_id_isar5_get, CP15_ID_ISAR5(%0)) 188_RF0(cp15_cbar_get, CP15_CBAR(%0)) 189 190/* Performance Monitor registers */ 191 192#if __ARM_ARCH == 6 && defined(CPU_ARM1176) 193_RF0(cp15_pmuserenr_get, 
CP15_PMUSERENR(%0)) 194_WF1(cp15_pmuserenr_set, CP15_PMUSERENR(%0)) 195_RF0(cp15_pmcr_get, CP15_PMCR(%0)) 196_WF1(cp15_pmcr_set, CP15_PMCR(%0)) 197_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0)) 198_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0)) 199#elif __ARM_ARCH > 6 200_RF0(cp15_pmcr_get, CP15_PMCR(%0)) 201_WF1(cp15_pmcr_set, CP15_PMCR(%0)) 202_RF0(cp15_pmcnten_get, CP15_PMCNTENSET(%0)) 203_WF1(cp15_pmcnten_set, CP15_PMCNTENSET(%0)) 204_WF1(cp15_pmcnten_clr, CP15_PMCNTENCLR(%0)) 205_RF0(cp15_pmovsr_get, CP15_PMOVSR(%0)) 206_WF1(cp15_pmovsr_set, CP15_PMOVSR(%0)) 207_WF1(cp15_pmswinc_set, CP15_PMSWINC(%0)) 208_RF0(cp15_pmselr_get, CP15_PMSELR(%0)) 209_WF1(cp15_pmselr_set, CP15_PMSELR(%0)) 210_RF0(cp15_pmccntr_get, CP15_PMCCNTR(%0)) 211_WF1(cp15_pmccntr_set, CP15_PMCCNTR(%0)) 212_RF0(cp15_pmxevtyper_get, CP15_PMXEVTYPER(%0)) 213_WF1(cp15_pmxevtyper_set, CP15_PMXEVTYPER(%0)) 214_RF0(cp15_pmxevcntr_get, CP15_PMXEVCNTRR(%0)) 215_WF1(cp15_pmxevcntr_set, CP15_PMXEVCNTRR(%0)) 216_RF0(cp15_pmuserenr_get, CP15_PMUSERENR(%0)) 217_WF1(cp15_pmuserenr_set, CP15_PMUSERENR(%0)) 218_RF0(cp15_pminten_get, CP15_PMINTENSET(%0)) 219_WF1(cp15_pminten_set, CP15_PMINTENSET(%0)) 220_WF1(cp15_pminten_clr, CP15_PMINTENCLR(%0)) 221#endif 222 223_RF0(cp15_tpidrurw_get, CP15_TPIDRURW(%0)) 224_WF1(cp15_tpidrurw_set, CP15_TPIDRURW(%0)) 225_RF0(cp15_tpidruro_get, CP15_TPIDRURO(%0)) 226_WF1(cp15_tpidruro_set, CP15_TPIDRURO(%0)) 227_RF0(cp15_tpidrpwr_get, CP15_TPIDRPRW(%0)) 228_WF1(cp15_tpidrpwr_set, CP15_TPIDRPRW(%0)) 229 230/* Generic Timer registers - only use when you know the hardware is available */ 231_RF0(cp15_cntfrq_get, CP15_CNTFRQ(%0)) 232_WF1(cp15_cntfrq_set, CP15_CNTFRQ(%0)) 233_RF0(cp15_cntkctl_get, CP15_CNTKCTL(%0)) 234_WF1(cp15_cntkctl_set, CP15_CNTKCTL(%0)) 235_RF0(cp15_cntp_tval_get, CP15_CNTP_TVAL(%0)) 236_WF1(cp15_cntp_tval_set, CP15_CNTP_TVAL(%0)) 237_RF0(cp15_cntp_ctl_get, CP15_CNTP_CTL(%0)) 238_WF1(cp15_cntp_ctl_set, CP15_CNTP_CTL(%0)) 239_RF0(cp15_cntv_tval_get, CP15_CNTV_TVAL(%0)) 
240_WF1(cp15_cntv_tval_set, CP15_CNTV_TVAL(%0)) 241_RF0(cp15_cntv_ctl_get, CP15_CNTV_CTL(%0)) 242_WF1(cp15_cntv_ctl_set, CP15_CNTV_CTL(%0)) 243_RF0(cp15_cnthctl_get, CP15_CNTHCTL(%0)) 244_WF1(cp15_cnthctl_set, CP15_CNTHCTL(%0)) 245_RF0(cp15_cnthp_tval_get, CP15_CNTHP_TVAL(%0)) 246_WF1(cp15_cnthp_tval_set, CP15_CNTHP_TVAL(%0)) 247_RF0(cp15_cnthp_ctl_get, CP15_CNTHP_CTL(%0)) 248_WF1(cp15_cnthp_ctl_set, CP15_CNTHP_CTL(%0)) 249 250_R64F0(cp15_cntpct_get, CP15_CNTPCT(%Q0, %R0)) 251_R64F0(cp15_cntvct_get, CP15_CNTVCT(%Q0, %R0)) 252_R64F0(cp15_cntp_cval_get, CP15_CNTP_CVAL(%Q0, %R0)) 253_W64F1(cp15_cntp_cval_set, CP15_CNTP_CVAL(%Q0, %R0)) 254_R64F0(cp15_cntv_cval_get, CP15_CNTV_CVAL(%Q0, %R0)) 255_W64F1(cp15_cntv_cval_set, CP15_CNTV_CVAL(%Q0, %R0)) 256_R64F0(cp15_cntvoff_get, CP15_CNTVOFF(%Q0, %R0)) 257_W64F1(cp15_cntvoff_set, CP15_CNTVOFF(%Q0, %R0)) 258_R64F0(cp15_cnthp_cval_get, CP15_CNTHP_CVAL(%Q0, %R0)) 259_W64F1(cp15_cnthp_cval_set, CP15_CNTHP_CVAL(%Q0, %R0)) 260 261#undef _FX 262#undef _RF0 263#undef _WF0 264#undef _WF1 265 266/* 267 * TLB maintenance operations. 268 */ 269 270/* Local (i.e. not broadcasting ) operations. */ 271 272/* Flush all TLB entries (even global). */ 273static __inline void 274tlb_flush_all_local(void) 275{ 276 277 dsb(); 278 _CP15_TLBIALL(); 279 dsb(); 280} 281 282/* Flush all not global TLB entries. */ 283static __inline void 284tlb_flush_all_ng_local(void) 285{ 286 287 dsb(); 288 _CP15_TLBIASID(CPU_ASID_KERNEL); 289 dsb(); 290} 291 292/* Flush single TLB entry (even global). */ 293static __inline void 294tlb_flush_local(vm_offset_t va) 295{ 296 297 KASSERT((va & PAGE_MASK) == 0, ("%s: va %#x not aligned", __func__, va)); 298 299 dsb(); 300 _CP15_TLBIMVA(va | CPU_ASID_KERNEL); 301 dsb(); 302} 303 304/* Flush range of TLB entries (even global). 
*/ 305static __inline void 306tlb_flush_range_local(vm_offset_t va, vm_size_t size) 307{ 308 vm_offset_t eva = va + size; 309 310 KASSERT((va & PAGE_MASK) == 0, ("%s: va %#x not aligned", __func__, va)); 311 KASSERT((size & PAGE_MASK) == 0, ("%s: size %#x not aligned", __func__, 312 size)); 313 314 dsb(); 315 for (; va < eva; va += PAGE_SIZE) 316 _CP15_TLBIMVA(va | CPU_ASID_KERNEL); 317 dsb(); 318} 319 320/* Broadcasting operations. */ 321#if __ARM_ARCH >= 7 && defined SMP 322 323static __inline void 324tlb_flush_all(void) 325{ 326 327 dsb(); 328 _CP15_TLBIALLIS(); 329 dsb(); 330} 331 332static __inline void 333tlb_flush_all_ng(void) 334{ 335 336 dsb(); 337 _CP15_TLBIASIDIS(CPU_ASID_KERNEL); 338 dsb(); 339} 340 341static __inline void 342tlb_flush(vm_offset_t va) 343{ 344 345 KASSERT((va & PAGE_MASK) == 0, ("%s: va %#x not aligned", __func__, va)); 346 347 dsb(); 348 _CP15_TLBIMVAAIS(va); 349 dsb(); 350} 351 352static __inline void 353tlb_flush_range(vm_offset_t va, vm_size_t size) 354{ 355 vm_offset_t eva = va + size; 356 357 KASSERT((va & PAGE_MASK) == 0, ("%s: va %#x not aligned", __func__, va)); 358 KASSERT((size & PAGE_MASK) == 0, ("%s: size %#x not aligned", __func__, 359 size)); 360 361 dsb(); 362 for (; va < eva; va += PAGE_SIZE) 363 _CP15_TLBIMVAAIS(va); 364 dsb(); 365} 366#else /* SMP */ 367 368#define tlb_flush_all() tlb_flush_all_local() 369#define tlb_flush_all_ng() tlb_flush_all_ng_local() 370#define tlb_flush(va) tlb_flush_local(va) 371#define tlb_flush_range(va, size) tlb_flush_range_local(va, size) 372 373#endif /* SMP */ 374 375/* 376 * Cache maintenance operations. 
 */

/*
 * Sync I and D caches to PoU (Point of Unification): clean each D-cache
 * line in the range so the I-side will observe the new data, then
 * invalidate the instruction cache and synchronize the pipeline.
 */
static __inline void
icache_sync(vm_offset_t va, vm_size_t size)
{
	vm_offset_t eva = va + size;

	dsb();
	va &= ~cpuinfo.dcache_line_mask;	/* round down to line boundary */
	for ( ; va < eva; va += cpuinfo.dcache_line_size) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);	/* clean by MVA to PoU (v7 only) */
#else
		_CP15_DCCMVAC(va);	/* pre-v7/UP: clean to PoC instead */
#endif
	}
	dsb();				/* cleans complete before invalidate */
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();		/* I-cache invalidate all, inner shareable */
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();				/* flush fetched instructions */
}

/* Invalidate entire I cache (IS-broadcast on v7 SMP). */
static __inline void
icache_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_ICIALLUIS();
#else
	_CP15_ICIALLU();
#endif
	dsb();
	isb();
}

/* Invalidate branch predictor buffer (IS-broadcast on v7 SMP). */
static __inline void
bpb_inv_all(void)
{
#if __ARM_ARCH >= 7 && defined SMP
	_CP15_BPIALLIS();
#else
	_CP15_BPIALL();
#endif
	dsb();
	isb();
}

/* Write back (clean) D-cache lines in [va, va + size) to PoU. */
static __inline void
dcache_wb_pou(vm_offset_t va, vm_size_t size)
{
	vm_offset_t eva = va + size;

	dsb();
	va &= ~cpuinfo.dcache_line_mask;	/* round down to line boundary */
	for ( ; va < eva; va += cpuinfo.dcache_line_size) {
#if __ARM_ARCH >= 7 && defined SMP
		_CP15_DCCMVAU(va);	/* clean by MVA to PoU (v7 only) */
#else
		_CP15_DCCMVAC(va);	/* pre-v7/UP: clean to PoC instead */
#endif
	}
	dsb();
}

/*
 * Invalidate D-cache to PoC
 *
 * Caches are invalidated from outermost to innermost as fresh cachelines
 * flow in this direction. In given range, if there was no dirty cacheline
 * in any cache before, no stale cacheline should remain in them after this
 * operation finishes.
 */
static __inline void
dcache_inv_poc(vm_offset_t va, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t eva = va + size;

	/* NOTE(review): assumes va and pa describe the same range — confirm with callers. */
	dsb();
	/* invalidate L2 first */
	cpu_l2cache_inv_range(pa, size);

	/* then L1 */
	va &= ~cpuinfo.dcache_line_mask;	/* round down to line boundary */
	for ( ; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);	/* invalidate by MVA to PoC */
	}
	dsb();
}

/*
 * Discard D-cache lines to PoC, prior to overwrite by DMA engine
 *
 * Invalidate caches, discarding data in dirty lines. This is useful
 * if the memory is about to be overwritten, e.g. by a DMA engine.
 * Invalidate caches from innermost to outermost to follow the flow
 * of dirty cachelines.
 */
static __inline void
dcache_dma_preread(vm_offset_t va, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t eva = va + size;

	/* invalidate L1 first */
	dsb();
	va &= ~cpuinfo.dcache_line_mask;	/* round down to line boundary */
	for ( ; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);	/* invalidate by MVA to PoC */
	}
	dsb();

	/* then L2 */
	cpu_l2cache_inv_range(pa, size);
}

/*
 * Write back D-cache to PoC
 *
 * Caches are written back from innermost to outermost as dirty cachelines
 * flow in this direction. In given range, no dirty cacheline should remain
 * in any cache after this operation finishes.
 */
static __inline void
dcache_wb_poc(vm_offset_t va, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t eva = va + size;

	dsb();
	va &= ~cpuinfo.dcache_line_mask;	/* round down to line boundary */
	for ( ; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCCMVAC(va);	/* clean by MVA to PoC */
	}
	dsb();

	/* L1 cleaned; push the data out through L2 as well. */
	cpu_l2cache_wb_range(pa, size);
}

/*
 * Write back and invalidate D-cache to PoC.  Three phases: clean L1 so
 * dirty data reaches L2, clean+invalidate L2, then invalidate L1 so no
 * stale lines survive.
 */
static __inline void
dcache_wbinv_poc(vm_offset_t sva, vm_paddr_t pa, vm_size_t size)
{
	vm_offset_t va;
	vm_offset_t eva = sva + size;

	dsb();
	/* write back L1 first */
	va = sva & ~cpuinfo.dcache_line_mask;
	for ( ; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCCMVAC(va);
	}
	dsb();

	/* then write back and invalidate L2 */
	cpu_l2cache_wbinv_range(pa, size);

	/* then invalidate L1 */
	va = sva & ~cpuinfo.dcache_line_mask;
	for ( ; va < eva; va += cpuinfo.dcache_line_size) {
		_CP15_DCIMVAC(va);
	}
	dsb();
}

/*
 * Set TTB0 register (translation table base), then invalidate the branch
 * predictor and flush the non-global TLB entries on this core so no
 * translations from the old table remain.
 */
static __inline void
cp15_ttbr_set(uint32_t reg)
{
	dsb();
	_CP15_TTB_SET(reg);
	dsb();
	_CP15_BPIALL();
	dsb();
	isb();
	tlb_flush_all_ng_local();
}

#endif /* _KERNEL */

#endif /* !MACHINE_CPU_V6_H */