/*
 * Copyright (c) 2015, ETH Zurich.
 * Copyright (c) 2015, Hewlett Packard Enterprise Development LP.
 * All rights reserved.
 *
 * This file is distributed under the terms in the attached LICENSE file.
 * If you do not find this file, copies can be found by writing to:
 * ETH Zurich D-INFK, Universitaetstr. 6, CH-8092 Zurich. Attn: Systems Group.
 */

#ifndef __SYSREG_H__
#define __SYSREG_H__

#include <bitmacros.h>

void sysreg_invalidate_i_and_d_caches_fast(void);
void sysreg_invalidate_i_and_d_caches(void);
void sysreg_invalidate_tlb_fn(void);
void sysreg_enable_mmu(void);

/*
 * ============================================================================
 * System registers from section C5.2
 * ============================================================================
 */

/* Generates armv8_sysreg_write_<bits>_<name>(), which writes its argument to
 * the system register <reg> via MSR and synchronizes with an ISB. */
#define ARMV8_SYSREG_WRITE_FN(_name, _reg, _bits) \
        static inline void \
        armv8_sysreg_write_## _bits ## _ ## _name(uint## _bits ## _t val) { \
            __asm volatile ("msr "#_reg ", %[val]\n" \
                            "isb \n" : : [val] "r" (val)); \
        }

/* Generates armv8_sysreg_read_<bits>_<name>(), which reads the system
 * register <reg> via MRS and synchronizes with an ISB. */
#define ARMV8_SYSREG_READ_FN(_name, _reg, _bits) \
        static inline uint## _bits ## _t \
        armv8_sysreg_read_## _bits ## _ ## _name(void) { \
            uint## _bits ## _t val; \
            __asm volatile("mrs %[val], "#_reg "\n" \
                           "isb \n" : [val] "=r" (val)); \
            return val; \
        }

#define ARMV8_SYSREG_WO(_name, _reg, _bits) \
    ARMV8_SYSREG_WRITE_FN(_name, _reg, _bits)

#define ARMV8_SYSREG_RO(_name, _reg, _bits) \
    ARMV8_SYSREG_READ_FN(_name, _reg, _bits)

#define ARMV8_SYSREG_RW(_name, _reg, _bits) \
    ARMV8_SYSREG_READ_FN(_name, _reg, _bits) \
    ARMV8_SYSREG_WRITE_FN(_name, _reg, _bits)

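/*
 * For illustration, ARMV8_SYSREG_RW(ttbr0_el1, ttbr0_el1, 64) expands to a
 * pair of accessors roughly equivalent to:
 *
 *   static inline uint64_t armv8_sysreg_read_64_ttbr0_el1(void) {
 *       uint64_t val;
 *       __asm volatile("mrs %[val], ttbr0_el1\n" "isb \n" : [val] "=r" (val));
 *       return val;
 *   }
 *
 *   static inline void armv8_sysreg_write_64_ttbr0_el1(uint64_t val) {
 *       __asm volatile("msr ttbr0_el1, %[val]\n" "isb \n" : : [val] "r" (val));
 *   }
 */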

/*
 * The accessor functions for these registers are generated from the
 * ARMV8_SYSREG_* invocations in sysreg_spec.h.
 */
#include "sysreg_spec.h"

/*
 * Legacy hand-written accessor list; superseded by sysreg_spec.h above and
 * kept commented out for reference.
 */
//ARMV8_SYSREG_RO(current_el, CurrentEL, 32)
//ARMV8_SYSREG_RW(daif, DAIF, 32)
//ARMV8_SYSREG_RW(dlr_el0, DLR_EL0, 64)
//ARMV8_SYSREG_RW(dspsr_el0, DSPSR_EL0, 32)
//ARMV8_SYSREG_RW(elr_el1, ELR_EL1, 64)
//ARMV8_SYSREG_RW(elr_el2, ELR_EL2, 64)
//ARMV8_SYSREG_RW(elr_el3, ELR_EL3, 64)
//ARMV8_SYSREG_RW(fpcr, FPCR, 32)
//ARMV8_SYSREG_RW(fpsr, FPSR, 32)
//ARMV8_SYSREG_RW(nzcv, NZCV, 32)
//ARMV8_SYSREG_RW(sp_el0, SP_EL0, 64)
//ARMV8_SYSREG_RW(sp_el1, SP_EL1, 64)
//ARMV8_SYSREG_RW(sp_el2, SP_EL2, 64)
//ARMV8_SYSREG_RW(sp_el3, SP_EL3, 64)
//ARMV8_SYSREG_RW(spsel, SPSel, 32)
//ARMV8_SYSREG_RW(spsr_abt, SPSR_abt, 32)
//ARMV8_SYSREG_RW(spsr_fiq, SPSR_fiq, 32)
//ARMV8_SYSREG_RW(spsr_irq, SPSR_irq, 32)
//ARMV8_SYSREG_RW(spsr_und, SPSR_und, 32)
//ARMV8_SYSREG_RW(spsr_el1, SPSR_EL1, 32)
//ARMV8_SYSREG_RW(spsr_el2, SPSR_EL2, 32)
//ARMV8_SYSREG_RW(spsr_el3, SPSR_EL3, 32)
//
//ARMV8_SYSREG_RO(id_aa64pfr0_el1, id_aa64pfr0_el1, 64)
//
//ARMV8_SYSREG_RW(CPACR_EL1, CPACR_EL1, 32)
//ARMV8_SYSREG_RW(esr_el1, esr_el1, 64)
//
//ARMV8_SYSREG_RW(dfsr, dfsr, 64)
//ARMV8_SYSREG_RW(ifsr, ifsr, 64)
//
//ARMV8_SYSREG_RW(hcr_el2, hcr_el2, 64)
//ARMV8_SYSREG_RW(scr_el3, scr_el3, 32)
//
//ARMV8_SYSREG_RW(mdcr_el2, mdcr_el2, 32)
//ARMV8_SYSREG_RW(mdcr_el3, mdcr_el3, 32)
//
//ARMV8_SYSREG_RW(ttbr0_el1, ttbr0_el1, 64)
//ARMV8_SYSREG_RW(ttbr0_el2, ttbr0_el2, 64)
//ARMV8_SYSREG_RW(ttbr0_el3, ttbr0_el3, 64)
//ARMV8_SYSREG_RW(ttbr1_el1, ttbr1_el1, 64)
//
//ARMV8_SYSREG_RW(mair_el1, mair_el1, 64)
//ARMV8_SYSREG_RW(mair_el2, mair_el2, 64)
//ARMV8_SYSREG_RW(mair_el3, mair_el3, 64)
//
///* gic registers */
//ARMV8_SYSREG_RW(ICC_AP0R0_EL1, S3_0_C12_C8_4, 32)
//ARMV8_SYSREG_RW(ICC_AP0R1_EL1, S3_0_C12_C8_5, 32)
//ARMV8_SYSREG_RW(ICC_AP0R2_EL1, S3_0_C12_C8_6, 32)
//ARMV8_SYSREG_RW(ICC_AP0R3_EL1, S3_0_C12_C8_7, 32)
//ARMV8_SYSREG_RW(ICC_AP1R0_EL1, S3_0_C12_C9_0, 32)
//ARMV8_SYSREG_RW(ICC_AP1R1_EL1, S3_0_C12_C9_1, 32)
//ARMV8_SYSREG_RW(ICC_AP1R2_EL1, S3_0_C12_C9_2, 32)
//ARMV8_SYSREG_RW(ICC_AP1R3_EL1, S3_0_C12_C9_3, 32)
//ARMV8_SYSREG_RW(ICC_ASGI1R_EL1, S3_0_C12_C11_6, 64)
//ARMV8_SYSREG_RW(ICC_BPR0_EL1, S3_0_C12_C8_3, 32)
//ARMV8_SYSREG_RW(ICC_BPR1_EL1, S3_0_C12_C12_3, 32)
//ARMV8_SYSREG_RW(ICC_CTLR_EL1, S3_0_C12_C12_4, 32)
//ARMV8_SYSREG_RW(ICC_DIR_EL1, S3_0_C12_C11_1, 32)
//ARMV8_SYSREG_RW(ICC_EOI1_EL1, ICC_EOI1_EL1, 32)
//ARMV8_SYSREG_RW(ICC_EOIR0_EL1, S3_0_C12_C8_1, 32)
//ARMV8_SYSREG_RW(ICC_EOIR1_EL1, S3_0_C12_C12_1, 32)
//ARMV8_SYSREG_RW(ICC_HPPIR0_EL1, S3_0_C12_C8_2, 32)
//ARMV8_SYSREG_RW(ICC_HPPIR1_EL1, S3_0_C12_C12_2, 32)
//ARMV8_SYSREG_RW(ICC_IAR0_EL1, S3_0_C12_C8_0, 32)
//ARMV8_SYSREG_RW(ICC_IAR1_EL1, S3_0_C12_C12_0, 32)
//ARMV8_SYSREG_RW(ICC_IGRPEN0_EL1, S3_0_C12_C12_6, 32)
//ARMV8_SYSREG_RW(ICC_IGRPEN1_EL1, S3_0_C12_C12_7, 32)
//ARMV8_SYSREG_RW(ICC_PMR_EL1, S3_0_C4_C6_0, 32)
//ARMV8_SYSREG_RW(ICC_RPR_EL1, S3_0_C12_C11_3, 32)
//ARMV8_SYSREG_RW(ICC_SGI0R_EL1, S3_0_C12_C11_7, 64)
//ARMV8_SYSREG_RW(ICC_SGI1R_EL1, S3_0_C12_C11_5, 64)
//ARMV8_SYSREG_RW(ICC_SRE_EL1, S3_0_C12_C12_5, 32)
//
//ARMV8_SYSREG_RW(SCTLR_EL1, SCTLR_EL1, 32)
//ARMV8_SYSREG_RW(SCTLR_EL2, SCTLR_EL2, 32)
//ARMV8_SYSREG_RW(SCTLR_EL3, SCTLR_EL3, 32)
//
//ARMV8_SYSREG_RW(TCR_EL1, TCR_EL1, 64)
//ARMV8_SYSREG_RW(TCR_EL2, TCR_EL2, 32)
//ARMV8_SYSREG_RW(TTBCR, TTBCR, 32)
//
///* counter registers */
//ARMV8_SYSREG_RO(cntfrq_el0, cntfrq_el0, 32)
//ARMV8_SYSREG_RW(cnthctl_el2, cnthctl_el2, 32)
//ARMV8_SYSREG_RW(cntkctl_el1, cntkctl_el1, 32)
//ARMV8_SYSREG_RW(cnthp_ctl_el2, cnthp_ctl_el2, 32)
//ARMV8_SYSREG_RW(cnthp_cval_el2, cnthp_cval_el2, 64)
//ARMV8_SYSREG_RW(cnthp_tval_el2, cnthp_tval_el2, 64)
//ARMV8_SYSREG_RW(cntp_ctl_el0, cntp_ctl_el0, 32)
//ARMV8_SYSREG_RW(cntp_cval_el0, cntp_cval_el0, 64)
//ARMV8_SYSREG_RW(cntp_tval_el0, cntp_tval_el0, 64)
//ARMV8_SYSREG_RW(cntpct_el0, cntpct_el0, 64)
//ARMV8_SYSREG_RW(cntps_ctl_el1, cntps_ctl_el1, 32)
//ARMV8_SYSREG_RW(cntps_cval_el1, cntps_cval_el1, 64)
//ARMV8_SYSREG_RW(cntps_tval_el1, cntps_tval_el1, 64)
//ARMV8_SYSREG_RW(cntv_ctl_el0, cntv_ctl_el0, 32)
//ARMV8_SYSREG_RW(cntv_cval_el0, cntv_cval_el0, 64)
//ARMV8_SYSREG_RW(cntv_tval_el0, cntv_tval_el0, 32)
//ARMV8_SYSREG_RO(cntvct_el0, cntvct_el0, 64)
//ARMV8_SYSREG_RW(cntvoff_el2, cntvoff_el2, 64)


/*
 * ============================================================================
 * C5.3 A64 system instructions for cache maintenance
 * ============================================================================
 */

/* Generates armv8_cache_ctrl_write_<bits>_<name>(), which issues the data
 * cache maintenance operation DC <op> on the address (or set/way value) in
 * its argument and synchronizes with an ISB. */
#define ARMV8_CACHE_CTRL_WRITE_FN(_name, _op, _bits) \
        static inline void \
        armv8_cache_ctrl_write_## _bits ## _ ## _name(uint## _bits ## _t val) { \
            __asm volatile ("dc "#_op ", %[val]\n" \
                            "isb \n" : : [val] "r" (val)); \
        }

/* DC maintenance operations are write-only system instructions; there is no
 * MRS-style read form, so only write accessors are generated. */
#define ARMV8_CACHE_CTRL_WO(_name, _op, _bits) \
    ARMV8_CACHE_CTRL_WRITE_FN(_name, _op, _bits)


ARMV8_CACHE_CTRL_WO(cisw,  CISW,  64)
ARMV8_CACHE_CTRL_WO(civac, CIVAC, 64)
ARMV8_CACHE_CTRL_WO(csw,   CSW,   64)
ARMV8_CACHE_CTRL_WO(cvac,  CVAC,  64)
ARMV8_CACHE_CTRL_WO(cvau,  CVAU,  64)
ARMV8_CACHE_CTRL_WO(isw,   ISW,   64)
ARMV8_CACHE_CTRL_WO(ivac,  IVAC,  64)
ARMV8_CACHE_CTRL_WO(zva,   ZVA,   64)

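/*
 * Illustrative sketch (not part of the original interface): cleaning and
 * invalidating a buffer by virtual address loops DC CIVAC over each cache
 * line, then waits for completion. The 64-byte line size is an assumption;
 * real code would derive it from CTR_EL0.
 *
 *   static inline void example_clean_invalidate(uint64_t start, uint64_t len)
 *   {
 *       const uint64_t line = 64;
 *       for (uint64_t va = start & ~(line - 1); va < start + len; va += line) {
 *           armv8_cache_ctrl_write_64_civac(va);
 *       }
 *       __asm volatile("dsb sy");  // wait for the maintenance to complete
 *   }
 */
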
/**
 * \brief Read the instruction fault status.
 *
 * AArch64 has no separate IFSR; instruction fault status is reported in
 * ESR_EL1, which this helper reads. The legacy name is kept for callers.
 */
static inline uint64_t
sysreg_read_ifsr(void) {
    uint64_t ifsr;
    __asm volatile("mrs %[ifsr], esr_el1" : [ifsr] "=r" (ifsr));
    return ifsr;
}

/**
 * \brief Read the data fault status.
 *
 * AArch64 has no separate DFSR; data fault status is reported in ESR_EL1,
 * which this helper reads. The legacy name is kept for callers.
 */
static inline uint64_t
sysreg_read_dfsr(void) {
    uint64_t dfsr;
    __asm volatile("mrs %[dfsr], esr_el1" : [dfsr] "=r" (dfsr));
    return dfsr;
}

/**
 * \brief Read the fault address register (FAR_EL1).
 */
static inline uint64_t
sysreg_read_far(void) {
    uint64_t addr;
    __asm volatile("mrs %[addr], far_el1" : [addr] "=r" (addr));
    return addr;
}

/**
 * \brief Read the Exception Syndrome Register (ESR_EL1).
 */
static inline uint64_t
sysreg_read_esr_el1(void) {
    uint64_t esr;
    __asm volatile("mrs %[esr], esr_el1" : [esr] "=r" (esr));
    return esr;
}

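/*
 * Illustrative sketch (handle_page_fault is hypothetical): a fault handler
 * typically combines the syndrome and the fault address, e.g.:
 *
 *   uint64_t esr = sysreg_read_esr_el1();
 *   uint64_t ec  = (esr >> 26) & 0x3f;   // ESR_EL1.EC, the exception class
 *   if (ec == 0x24 || ec == 0x25) {      // data abort from lower or same EL
 *       handle_page_fault(sysreg_read_far(), esr);
 *   }
 */
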
/**
 * \brief Read the translation control register.
 *
 * The legacy AArch32 name TTBCR is kept; on AArch64 this is TCR_EL1.
 */
static inline uint64_t
sysreg_read_ttbcr(void) {
    uint64_t ttbcr;
    __asm volatile("mrs %[ttbcr], tcr_el1" : [ttbcr] "=r" (ttbcr));
    return ttbcr;
}

static inline void
sysreg_write_ttbcr(uint64_t ttbcr) {
    __asm volatile("msr tcr_el1, %[ttbcr]" : : [ttbcr] "r" (ttbcr));
}

/**
 * \brief Read the system control register (SCTLR_EL1), which holds the MMU
 *        and cache enable bits.
 */
static inline uint64_t
sysreg_read_cache_status(void) {
    uint64_t cache;
    __asm volatile("mrs %[cache], sctlr_el1" : [cache] "=r" (cache));
    return cache;
}
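
/*
 * For illustration: in SCTLR_EL1, bit 0 (M) enables the MMU, bit 2 (C) the
 * data caches and bit 12 (I) the instruction caches, so a caller might test
 *
 *   bool dcache_on = (sysreg_read_cache_status() >> 2) & 1;
 */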

static inline void
sysreg_invalidate_tlb(void) {
    /* Invalidate all EL1 TLB entries and wait for completion before any
     * subsequent memory access or instruction fetch. */
    __asm volatile("tlbi vmalle1\n\t"
                   "dsb ish\n\t"
                   "isb" : : : "memory");
}

static inline uint8_t
sysreg_get_cpu_id(void) {
    uint64_t mpidr; /* MRS requires a 64-bit destination register. */
    __asm volatile("mrs %[mpidr], mpidr_el1" : [mpidr] "=r" (mpidr));
    /* MPIDR_EL1.Aff0 (bits [7:0]) holds the core number within a cluster;
     * masking with 0x3 assumes at most four cores per cluster. */
    return mpidr & 0x3;
}

/**
 * \brief Read the counter-timer frequency register (CNTFRQ_EL0), which
 *        reports the generic timer frequency in Hz.
 */
static inline uint32_t
sysreg_read_cntfrq_el0(void) {
    uint64_t frq; /* MRS requires a 64-bit destination register. */
    __asm volatile("mrs %[frq], cntfrq_el0" : [frq] "=r" (frq));
    return (uint32_t)frq;
}

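/*
 * Illustrative sketch (assumes a CNTPCT_EL0 read accessor such as the one the
 * commented-out cntpct_el0 entry above would generate): converting a delay of
 * `ms` milliseconds into a deadline in counter ticks:
 *
 *   uint64_t ticks_per_ms = sysreg_read_cntfrq_el0() / 1000;
 *   uint64_t deadline = armv8_sysreg_read_64_cntpct_el0() + ms * ticks_per_ms;
 */
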

static inline uint64_t
sysreg_read_sp(void) {
    uint64_t sp;
    __asm volatile("mov %[sp], sp" : [sp] "=r" (sp));
    return sp;
}

static inline void
sysreg_write_sp(uint64_t sp) {
    __asm volatile("mov sp, %[sp]" : : [sp] "r" (sp));
}

/* Set the EL1 exception vector base address (VBAR_EL1). */
static inline void
sysreg_write_vbar_el1(uint64_t vbar_el1) {
    __asm volatile("msr vbar_el1, %[vbar_el1]" : : [vbar_el1] "r" (vbar_el1));
}

/* Set the EL1 stack pointer (SP_EL1). */
static inline void
sysreg_write_sp_el1(uint64_t sp_el1) {
    __asm volatile("msr sp_el1, %[sp_el1]" : : [sp_el1] "r" (sp_el1));
}

/* Set the EL0 read-only thread pointer (TPIDRRO_EL0). */
static inline void
sysreg_write_tpidrro_el0(uint64_t x) {
    __asm volatile("msr tpidrro_el0, %[x]" : : [x] "r" (x));
}

/* Set the EL1 software thread ID register (TPIDR_EL1). */
static inline void
sysreg_write_tpidr_el1(uint64_t x) {
    __asm volatile("msr tpidr_el1, %[x]" : : [x] "r" (x));
}
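
/*
 * Usage note (sketch; current_dispatcher is a hypothetical name): kernels
 * commonly publish a per-thread value to user space through the read-only
 * thread pointer, e.g.
 *
 *   sysreg_write_tpidrro_el0((uint64_t)current_dispatcher);
 */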

/* Read the memory model feature register ID_AA64MMFR0_EL1. */
static inline uint64_t
sysreg_get_id_aa64mmfr0_el1(void) {
    uint64_t pfr;
    __asm volatile("mrs %[pfr], id_aa64mmfr0_el1" : [pfr] "=r" (pfr));
    return pfr;
}

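/*
 * For illustration: the supported physical address range is encoded in
 * ID_AA64MMFR0_EL1.PARange (bits [3:0]):
 *
 *   uint64_t parange = sysreg_get_id_aa64mmfr0_el1() & 0xf;
 *   // 0b0000 = 32 bits, 0b0001 = 36, 0b0010 = 40, ..., 0b0101 = 48
 */
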
/* Read the Physical Address Register (PAR_EL1), which holds the result of
 * the most recent AT address translation instruction. */
static inline uint64_t
sysreg_read_par_el1(void) {
    uint64_t par_el1;
    __asm volatile("mrs %[x], par_el1" : [x] "=r" (par_el1));
    return par_el1;
}

/* Read the Interrupt Status Register (ISR_EL1), which reports pending
 * IRQ, FIQ and SError interrupts. */
static inline uint32_t
sysreg_read_isr_el1(void) {
    uint64_t x; /* MRS requires a 64-bit destination register. */
    __asm volatile("mrs %[x], isr_el1" : [x] "=r" (x));
    return (uint32_t)x;
}

#endif // __SYSREG_H__