/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * include/asm-arm/macro.h
 *
 * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
 */

#ifndef __ASM_ARM_MACRO_H__
#define __ASM_ARM_MACRO_H__

#ifdef CONFIG_ARM64
#include <asm/system.h>
#endif

#ifdef __ASSEMBLY__

/*
 * These macros provide a convenient way to write 8, 16 and 32 bit data
 * to any address.
 * Registers r4 and r5 are used; any data in these registers is
 * overwritten by the macros.
 * The macros are valid for any ARM architecture. They do not implement
 * any memory barriers, so use them with caution when the caches are
 * enabled or on a multi-core system.
 */

.macro	write32, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	str	r5, [r4]
.endm

.macro	write16, addr, data
	ldr	r4, =\addr
	ldrh	r5, =\data
	strh	r5, [r4]
.endm

.macro	write8, addr, data
	ldr	r4, =\addr
	ldrb	r5, =\data
	strb	r5, [r4]
.endm
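
/*
 * Example usage (illustrative only; the addresses and data values below are
 * hypothetical and not taken from any real board):
 *
 *	write32	0x4a002000, 0x00000001
 *	write16	0x4a002004, 0x00ff
 *	write8	0x4a002006, 0x01
 */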

/*
 * This macro generates a loop that can be used for delays in the code.
 * Register r4 is used; any data in this register is overwritten by the
 * macro.
 * The macro is valid for any ARM architecture, but the actual time spent
 * in the loop will vary from CPU to CPU.
 */

.macro	wait_timer, time
	ldr	r4, =\time
1:
	nop
	subs	r4, r4, #1
	bcs	1b
.endm
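
/*
 * Example usage (illustrative): a crude busy-wait of roughly 0x100000 loop
 * iterations; the real delay depends on the core type and clock frequency:
 *
 *	wait_timer	0x100000
 */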

#ifdef CONFIG_ARM64
/*
 * Register aliases.
 */
lr	.req	x30

/*
 * Branch according to exception level
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, #0x8
	b.gt	\el3_label
	b.eq	\el2_label
	b.lt	\el1_label
.endm
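
/*
 * Example (illustrative): CurrentEL holds the exception level in bits [3:2],
 * so the comparison above distinguishes EL3 (0xc), EL2 (0x8) and EL1 (0x4).
 * A typical use is selecting the EL-specific copy of a system register:
 *
 *	switch_el x1, 3f, 2f, 1f
 * 3:	msr	vbar_el3, x0
 *	b	0f
 * 2:	msr	vbar_el2, x0
 *	b	0f
 * 1:	msr	vbar_el1, x0
 * 0:
 */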

/*
 * Branch if we are not in the highest exception level
 */
.macro	branch_if_not_highest_el, xreg, label
	switch_el \xreg, 3f, 2f, 1f

2:	mrs	\xreg, ID_AA64PFR0_EL1
	and	\xreg, \xreg, #(ID_AA64PFR0_EL1_EL3)
	cbnz	\xreg, \label
	b	3f

1:	mrs	\xreg, ID_AA64PFR0_EL1
	and	\xreg, \xreg, #(ID_AA64PFR0_EL1_EL3 | ID_AA64PFR0_EL1_EL2)
	cbnz	\xreg, \label

3:
.endm
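
/*
 * Example (illustrative): the macro reads ID_AA64PFR0_EL1 to see whether an
 * exception level higher than the current one is implemented, so code that
 * must only run in the highest implemented EL can bail out early (the label
 * name below is hypothetical):
 *
 *	branch_if_not_highest_el x0, skip_el3_only_init
 *	...setup that is only legal in the highest implemented EL...
 * skip_el3_only_init:
 */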

/*
 * Branch if current processor is a Cortex-A57 core.
 */
.macro	branch_if_a57_core, xreg, a57_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD07		/* Cortex-A57 MPCore processor. */
	b.eq	\a57_label
.endm

/*
 * Branch if current processor is a Cortex-A53 core.
 */
.macro	branch_if_a53_core, xreg, a53_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD03		/* Cortex-A53 MPCore processor. */
	b.eq	\a53_label
.endm
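
/*
 * Example (illustrative): both macros extract the primary part number,
 * MIDR_EL1[15:4], and compare it against the Arm part IDs 0xD07 (Cortex-A57)
 * and 0xD03 (Cortex-A53). A typical use is selecting core-specific setup
 * (the label names below are hypothetical):
 *
 *	branch_if_a57_core x0, setup_a57
 *	branch_if_a53_core x0, setup_a53
 *	b	core_setup_done
 */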

/*
 * Branch if the current processor is a slave; the processor whose affinity
 * value is all zeros is treated as the master.
 */
.macro	branch_if_slave, xreg, slave_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	mrs	\xreg, mpidr_el1
	and	\xreg, \xreg,  0xffffffffff	/* clear bits [63:40] */
	and	\xreg, \xreg, ~0x00ff000000	/* also clear bits [31:24] */
	cbnz	\xreg, \slave_label
#endif
.endm

/*
 * Branch if the current processor is the master; the processor whose affinity
 * value is all zeros is treated as the master.
 */
.macro	branch_if_master, xreg, master_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	mrs	\xreg, mpidr_el1
	and	\xreg, \xreg,  0xffffffffff	/* clear bits [63:40] */
	and	\xreg, \xreg, ~0x00ff000000	/* also clear bits [31:24] */
	cbz	\xreg, \master_label
#else
	b	\master_label
#endif
.endm
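
/*
 * Example (illustrative): the masks above keep only the MPIDR_EL1 affinity
 * fields (Aff0-Aff3), so the core whose affinity value is all zeros continues
 * as the master while the others are parked (the label name is hypothetical):
 *
 *	branch_if_master x0, master_cpu
 *	...secondary cores wait here, e.g. in a wfe/holding-pen loop...
 * master_cpu:
 *	...single-threaded boot continues...
 */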

/*
 * Switch from EL3 to EL2 for ARMv8
 * @ep:     kernel entry point
 * @flag:   The execution state flag for the lower exception
 *          level, ES_TO_AARCH64 or ES_TO_AARCH32
 * @tmp:    temporary register
 *
 * For loading a 32-bit OS, x1 is the machine nr and x2 is the ftaddr.
 * For loading a 64-bit OS, x0 is the physical address of the FDT blob.
 * They will be passed to the guest.
 */
.macro armv8_switch_to_el2_m, ep, flag, tmp
	msr	cptr_el3, xzr		/* Disable coprocessor traps to EL3 */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */

	/* Initialize Generic Timers */
	msr	cntvoff_el2, xzr

	/* Initialize SCTLR_EL2
	 *
	 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
	 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
	 * EE,WXN,I,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL2_RES1 | SCTLR_EL2_EE_LE |\
			SCTLR_EL2_WXN_DIS | SCTLR_EL2_ICACHE_DIS |\
			SCTLR_EL2_SA_DIS | SCTLR_EL2_DCACHE_DIS |\
			SCTLR_EL2_ALIGN_DIS | SCTLR_EL2_MMU_DIS)
	msr	sctlr_el2, \tmp

	mov	\tmp, sp
	msr	sp_el2, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el3
	msr	vbar_el2, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL2 or AArch32 Hypervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/*
	 * The next lower exception level is AArch64: 64bit EL2 | HCE |
	 * RES1 (Bits[5:4]) | Non-secure EL0/EL1.
	 * Whether the SMD bit is set depends on the configuration: with
	 * ARMv8 PSCI enabled, SMC instructions must remain usable.
	 */
#ifdef CONFIG_ARMV8_PSCI
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_RES1 | SCR_EL3_NS_EN)
#else
	ldr	\tmp, =(SCR_EL3_RW_AARCH64 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
#endif

#ifdef CONFIG_ARMV8_EA_EL3_FIRST
	orr	\tmp, \tmp, #SCR_EL3_EA_EN
#endif
	msr	scr_el3, \tmp

	/* Return to the EL2_SP2 mode from EL3 */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL2H)
	msr	spsr_el3, \tmp
	msr	elr_el3, \ep
	eret

1:
	/*
	 * The next lower exception level is AArch32, 32bit EL2 | HCE |
	 * SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1.
	 */
	ldr	\tmp, =(SCR_EL3_RW_AARCH32 | SCR_EL3_HCE_EN |\
			SCR_EL3_SMD_DIS | SCR_EL3_RES1 |\
			SCR_EL3_NS_EN)
	msr	scr_el3, \tmp

	/* Return to AArch32 Hypervisor mode */
	ldr	\tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_HYP)
	msr	spsr_el3, \tmp
	msr	elr_el3, \ep
	eret
.endm
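
/*
 * Example (illustrative sketch; the register assignment below is only a
 * convention, not required by the macro: the entry point in x4, the
 * execution-state flag in x5, and x6 as scratch):
 *
 *	mov	x5, #ES_TO_AARCH64
 *	armv8_switch_to_el2_m x4, x5, x6
 */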

/*
 * Switch from EL2 to EL1 for ARMv8
 * @ep:     kernel entry point
 * @flag:   The execution state flag for the lower exception
 *          level, ES_TO_AARCH64 or ES_TO_AARCH32
 * @tmp:    temporary register
 * @tmp2:   second temporary register
 *
 * For loading a 32-bit OS, x1 is the machine nr and x2 is the ftaddr.
 * For loading a 64-bit OS, x0 is the physical address of the FDT blob.
 * They will be passed to the guest.
 */
.macro armv8_switch_to_el1_m, ep, flag, tmp, tmp2
	/* Initialize Generic Timers */
	mrs	\tmp, cnthctl_el2
	/* Enable EL1 access to timers */
	orr	\tmp, \tmp, #(CNTHCTL_EL2_EL1PCEN_EN |\
		CNTHCTL_EL2_EL1PCTEN_EN)
	msr	cnthctl_el2, \tmp
	msr	cntvoff_el2, xzr

	/* Initialize MPID/MPIDR registers */
	mrs	\tmp, midr_el1
	msr	vpidr_el2, \tmp
	mrs	\tmp, mpidr_el1
	msr	vmpidr_el2, \tmp

	/* Disable coprocessor traps */
	mov	\tmp, #CPTR_EL2_RES1
	msr	cptr_el2, \tmp		/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable coprocessor traps to EL2 */
	mov	\tmp, #CPACR_EL1_FPEN_EN
	msr	cpacr_el1, \tmp		/* Enable FP/SIMD at EL1 */

	/* SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,EOE,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
	ldr	\tmp, =(SCTLR_EL1_RES1 | SCTLR_EL1_UCI_DIS |\
			SCTLR_EL1_EE_LE | SCTLR_EL1_WXN_DIS |\
			SCTLR_EL1_NTWE_DIS | SCTLR_EL1_NTWI_DIS |\
			SCTLR_EL1_UCT_DIS | SCTLR_EL1_DZE_DIS |\
			SCTLR_EL1_ICACHE_DIS | SCTLR_EL1_UMA_DIS |\
			SCTLR_EL1_SED_EN | SCTLR_EL1_ITD_EN |\
			SCTLR_EL1_CP15BEN_DIS | SCTLR_EL1_SA0_DIS |\
			SCTLR_EL1_SA_DIS | SCTLR_EL1_DCACHE_DIS |\
			SCTLR_EL1_ALIGN_DIS | SCTLR_EL1_MMU_DIS)
	msr	sctlr_el1, \tmp

	mov	\tmp, sp
	msr	sp_el1, \tmp		/* Migrate SP */
	mrs	\tmp, vbar_el2
	msr	vbar_el1, \tmp		/* Migrate VBAR */

	/* Check switch to AArch64 EL1 or AArch32 Supervisor mode */
	cmp	\flag, #ES_TO_AARCH32
	b.eq	1f

	/* Initialize HCR_EL2 */
	/* Only disable PAuth traps if PAuth is supported */
	mrs	\tmp, id_aa64isar1_el1
	ldr	\tmp2, =(ID_AA64ISAR1_EL1_GPI | ID_AA64ISAR1_EL1_GPA | \
		      ID_AA64ISAR1_EL1_API | ID_AA64ISAR1_EL1_APA)
	tst	\tmp, \tmp2
	mov	\tmp2, #(HCR_EL2_RW_AARCH64 | HCR_EL2_HCD_DIS)
	orr	\tmp, \tmp2, #(HCR_EL2_APK | HCR_EL2_API)
	csel	\tmp, \tmp2, \tmp, eq
	msr	hcr_el2, \tmp

	/* Return to the EL1_SP1 mode from EL2 */
	ldr	\tmp, =(SPSR_EL_DEBUG_MASK | SPSR_EL_SERR_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_M_AARCH64 | SPSR_EL_M_EL1H)
	msr	spsr_el2, \tmp
	msr	elr_el2, \ep
	eret

1:
	/* Initialize HCR_EL2 */
	ldr	\tmp, =(HCR_EL2_RW_AARCH32 | HCR_EL2_HCD_DIS)
	msr	hcr_el2, \tmp

	/* Return to AArch32 Supervisor mode from EL2 */
	ldr	\tmp, =(SPSR_EL_END_LE | SPSR_EL_ASYN_MASK |\
			SPSR_EL_IRQ_MASK | SPSR_EL_FIQ_MASK |\
			SPSR_EL_T_A32 | SPSR_EL_M_AARCH32 |\
			SPSR_EL_M_SVC)
	msr	spsr_el2, \tmp
	msr	elr_el2, \ep
	eret
.endm
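
/*
 * Example (illustrative sketch; as above, the register assignment is only a
 * convention: the entry point in x4, the execution-state flag in x5, and
 * x6/x7 as scratch registers):
 *
 *	mov	x5, #ES_TO_AARCH64
 *	armv8_switch_to_el1_m x4, x5, x6, x7
 */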

#if defined(CONFIG_GICV3)
/*
 * Wait in WFI, acknowledging and completing each interrupt taken, until an
 * interrupt with ID 0 (SGI 0) is received.
 */
.macro gic_wait_for_interrupt_m xreg1
0:	wfi
	mrs     \xreg1, ICC_IAR1_EL1	/* Acknowledge the interrupt */
	msr     ICC_EOIR1_EL1, \xreg1	/* Signal end of interrupt */
	cbnz    \xreg1, 0b
.endm
#elif defined(CONFIG_GICV2)
/*
 * Same as above for GICv2; \xreg1 holds the GIC CPU interface base address.
 */
.macro gic_wait_for_interrupt_m xreg1, wreg2
0:	wfi
	ldr     \wreg2, [\xreg1, GICC_AIAR]	/* Acknowledge the interrupt */
	str     \wreg2, [\xreg1, GICC_AEOIR]	/* Signal end of interrupt */
	and	\wreg2, \wreg2, #0x3ff		/* Keep only the interrupt ID */
	cbnz    \wreg2, 0b
.endm
#endif

#endif /* CONFIG_ARM64 */

#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_MACRO_H__ */