/*-
 * Copyright 2016 Svatopluk Kraus <skra@FreeBSD.org>
 * Copyright 2016 Michal Meloun <mmel@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
26276333Sian * 27276333Sian * $FreeBSD: stable/11/sys/arm/include/cpu-v4.h 328966 2018-02-07 06:27:29Z mmel $ 28276333Sian */ 29295315Smmel#ifndef MACHINE_CPU_V4_H 30295315Smmel#define MACHINE_CPU_V4_H 31276333Sian 32282767Sandrew/* There are no user serviceable parts here, they may change without notice */ 33282767Sandrew#ifndef _KERNEL 34282767Sandrew#error Only include this file in the kernel 35295315Smmel#endif 36282767Sandrew 37295315Smmel#include <machine/atomic.h> 38295315Smmel#include <machine/cpufunc.h> 39295315Smmel#include <machine/cpuinfo.h> 40295315Smmel#include <machine/sysreg.h> 41276333Sian 42295315Smmel#if __ARM_ARCH >= 6 43295319Smmel#error Never include this file for ARMv6 44295315Smmel#else 45295315Smmel 46276333Sian#define CPU_ASID_KERNEL 0 47276333Sian 48276333Sian/* 49276333Sian * Macros to generate CP15 (system control processor) read/write functions. 50276333Sian */ 51276333Sian#define _FX(s...) #s 52276333Sian 53276333Sian#define _RF0(fname, aname...) \ 54328966Smmelstatic __inline uint32_t \ 55276333Sianfname(void) \ 56276333Sian{ \ 57328966Smmel uint32_t reg; \ 58276333Sian __asm __volatile("mrc\t" _FX(aname): "=r" (reg)); \ 59276333Sian return(reg); \ 60276333Sian} 61276333Sian 62280985Sandrew#define _R64F0(fname, aname) \ 63280985Sandrewstatic __inline uint64_t \ 64280985Sandrewfname(void) \ 65280985Sandrew{ \ 66280985Sandrew uint64_t reg; \ 67280985Sandrew __asm __volatile("mrrc\t" _FX(aname): "=r" (reg)); \ 68280985Sandrew return(reg); \ 69280985Sandrew} 70280985Sandrew 71276333Sian#define _WF0(fname, aname...) \ 72276333Sianstatic __inline void \ 73276333Sianfname(void) \ 74276333Sian{ \ 75276333Sian __asm __volatile("mcr\t" _FX(aname)); \ 76276333Sian} 77276333Sian 78276333Sian#define _WF1(fname, aname...) 
\ 79276333Sianstatic __inline void \ 80328966Smmelfname(uint32_t reg) \ 81276333Sian{ \ 82276333Sian __asm __volatile("mcr\t" _FX(aname):: "r" (reg)); \ 83276333Sian} 84276333Sian 85280985Sandrew 86276333Sian/* 87276333Sian * Publicly accessible functions 88276333Sian */ 89276333Sian 90294740Szbb 91276333Sian/* Various control registers */ 92276333Sian 93283365Sandrew_RF0(cp15_cpacr_get, CP15_CPACR(%0)) 94283365Sandrew_WF1(cp15_cpacr_set, CP15_CPACR(%0)) 95276333Sian_RF0(cp15_dfsr_get, CP15_DFSR(%0)) 96276333Sian_RF0(cp15_ttbr_get, CP15_TTBR0(%0)) 97276333Sian_RF0(cp15_dfar_get, CP15_DFAR(%0)) 98295315Smmel/* XScale */ 99283365Sandrew_RF0(cp15_actlr_get, CP15_ACTLR(%0)) 100283365Sandrew_WF1(cp15_actlr_set, CP15_ACTLR(%0)) 101276333Sian 102276333Sian/*CPU id registers */ 103276333Sian_RF0(cp15_midr_get, CP15_MIDR(%0)) 104276333Sian_RF0(cp15_ctr_get, CP15_CTR(%0)) 105276333Sian_RF0(cp15_tcmtr_get, CP15_TCMTR(%0)) 106276333Sian_RF0(cp15_tlbtr_get, CP15_TLBTR(%0)) 107300533Sian_RF0(cp15_sctlr_get, CP15_SCTLR(%0)) 108276333Sian 109276333Sian#undef _FX 110276333Sian#undef _RF0 111276333Sian#undef _WF0 112276333Sian#undef _WF1 113276333Sian 114289892Sian 115289892Sian/* 116289892Sian * armv4/5 compatibility shims. 117289892Sian * 118289892Sian * These functions provide armv4 cache maintenance using the new armv6 names. 119289892Sian * Included here are just the functions actually used now in common code; it may 120289892Sian * be necessary to add things here over time. 121289892Sian * 122289892Sian * The callers of the dcache functions expect these routines to handle address 123289892Sian * and size values which are not aligned to cacheline boundaries; the armv4 and 124289892Sian * armv5 asm code handles that. 
 */

/*
 * Flush all TLB entries (both I and D) and wait for the operation to
 * complete before returning.
 */
static __inline void
tlb_flush_all(void)
{
	cpu_tlb_flushID();
	cpu_cpwait();
}

/*
 * Synchronize the instruction cache for the given virtual address range.
 */
static __inline void
icache_sync(vm_offset_t va, vm_size_t size)
{
	cpu_icache_sync_range(va, size);
}

/*
 * Invalidate the data cache range to the point of coherency, L1 first and
 * then L2.  A PIPT L2 is indexed by physical address, so it is given 'pa';
 * otherwise the virtual address is used.
 */
static __inline void
dcache_inv_poc(vm_offset_t va, vm_paddr_t pa, vm_size_t size)
{

	cpu_dcache_inv_range(va, size);
#ifdef ARM_L2_PIPT
	cpu_l2cache_inv_range(pa, size);
#else
	cpu_l2cache_inv_range(va, size);
#endif
}

/*
 * Invalidate a range after DMA completion.  Same operations as
 * dcache_inv_poc(), but the cache levels are processed in the opposite
 * order: L2 before L1.
 */
static __inline void
dcache_inv_poc_dma(vm_offset_t va, vm_paddr_t pa, vm_size_t size)
{

	/* See armv6 code, above, for why we do L2 before L1 in this case. */
#ifdef ARM_L2_PIPT
	cpu_l2cache_inv_range(pa, size);
#else
	cpu_l2cache_inv_range(va, size);
#endif
	cpu_dcache_inv_range(va, size);
}

/*
 * Write back the data cache range to the point of coherency, L1 first and
 * then L2 (physical address for a PIPT L2, virtual otherwise).
 */
static __inline void
dcache_wb_poc(vm_offset_t va, vm_paddr_t pa, vm_size_t size)
{

	cpu_dcache_wb_range(va, size);
#ifdef ARM_L2_PIPT
	cpu_l2cache_wb_range(pa, size);
#else
	cpu_l2cache_wb_range(va, size);
#endif
}

/*
 * Write back and invalidate the entire I/D cache and the entire L2 cache.
 */
static __inline void
dcache_wbinv_poc_all(void)
{
	cpu_idcache_wbinv_all();
	cpu_l2cache_wbinv_all();
}

/*
 * Note: this #endif closes the "#if __ARM_ARCH >= 6 ... #else" conditional;
 * the _KERNEL check near the top of the file was already closed there.
 */
#endif /* __ARM_ARCH < 6 */

#endif /* MACHINE_CPU_V4_H */