exception.S revision 319204
/*-
 * Copyright (c) 2014 Andrew Turner
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 */

#include <machine/asm.h>
__FBSDID("$FreeBSD: stable/11/sys/arm64/arm64/exception.S 319204 2017-05-30 13:02:10Z andrew $");

#include "assym.s"

	.text

/*
 * save_registers el -- build a struct trapframe on the kernel stack.
 *
 * \el is the exception level the exception was taken from: 0 for
 * userland (EL0), 1 for the kernel (EL1).  On exit sp points at the
 * trapframe (offsets TF_* from assym.s), x29 points at the saved
 * x29/x30 frame record above it, and x18 holds the per-CPU data
 * pointer loaded from tpidr_el1.
 */
.macro save_registers el
.if \el == 1
	/*
	 * Taken from EL1: capture the interrupted kernel sp in x18 so it
	 * can be stored in TF_SP below (the kernel's x18/pcpu value is
	 * reloaded from tpidr_el1 at the end of this macro).
	 * NOTE(review): the extra 128-byte skip below the interrupted
	 * frame looks like deliberate headroom -- confirm its purpose
	 * before changing it.
	 */
	mov	x18, sp
	sub	sp, sp, #128
.endif
	/* Trapframe plus 16 bytes for the x29/x30 frame record above it. */
	sub	sp, sp, #(TF_SIZE + 16)
	stp	x29, x30, [sp, #(TF_SIZE)]
	/* Save all general purpose registers into the trapframe. */
	stp	x28, x29, [sp, #(TF_X + 28 * 8)]
	stp	x26, x27, [sp, #(TF_X + 26 * 8)]
	stp	x24, x25, [sp, #(TF_X + 24 * 8)]
	stp	x22, x23, [sp, #(TF_X + 22 * 8)]
	stp	x20, x21, [sp, #(TF_X + 20 * 8)]
	stp	x18, x19, [sp, #(TF_X + 18 * 8)]
	stp	x16, x17, [sp, #(TF_X + 16 * 8)]
	stp	x14, x15, [sp, #(TF_X + 14 * 8)]
	stp	x12, x13, [sp, #(TF_X + 12 * 8)]
	stp	x10, x11, [sp, #(TF_X + 10 * 8)]
	stp	x8,  x9,  [sp, #(TF_X + 8  * 8)]
	stp	x6,  x7,  [sp, #(TF_X + 6  * 8)]
	stp	x4,  x5,  [sp, #(TF_X + 4  * 8)]
	stp	x2,  x3,  [sp, #(TF_X + 2  * 8)]
	stp	x0,  x1,  [sp, #(TF_X + 0  * 8)]
	/* Exception return address, saved program state, and syndrome. */
	mrs	x10, elr_el1
	mrs	x11, spsr_el1
	mrs	x12, esr_el1
.if \el == 0
	/* From EL0 the interrupted stack pointer is banked in sp_el0. */
	mrs	x18, sp_el0
.endif
	str	x10, [sp, #(TF_ELR)]
	/* spsr and esr are 32-bit values; store them as an adjacent pair. */
	stp	w11, w12, [sp, #(TF_SPSR)]
	/* x18 = interrupted sp (EL1: moved above; EL0: from sp_el0). */
	stp	x18, lr, [sp, #(TF_SP)]
	/* Reload the per-CPU data pointer the kernel keeps in x18. */
	mrs	x18, tpidr_el1
	/* Point the frame pointer at the frame record for unwinding. */
	add	x29, sp, #(TF_SIZE)
.endm

/*
 * restore_registers el -- unwind a trapframe built by save_registers
 * and prepare the system registers for eret.  \el as above.
 */
.macro restore_registers el
.if \el == 1
	/*
	 * Disable interrupts, x18 may change in the interrupt exception
	 * handler.  For EL0 exceptions, do_ast already did this.
	 */
	msr	daifset, #2
.endif
	ldp	x18, lr, [sp, #(TF_SP)]
	ldp	x10, x11, [sp, #(TF_ELR)]
.if \el == 0
	/* Restore the userland stack pointer. */
	msr	sp_el0, x18
.endif
	msr	spsr_el1, x11
	msr	elr_el1, x10
	ldp	x0,  x1,  [sp, #(TF_X + 0  * 8)]
	ldp	x2,  x3,  [sp, #(TF_X + 2  * 8)]
	ldp	x4,  x5,  [sp, #(TF_X + 4  * 8)]
	ldp	x6,  x7,  [sp, #(TF_X + 6  * 8)]
	ldp	x8,  x9,  [sp, #(TF_X + 8  * 8)]
	ldp	x10, x11, [sp, #(TF_X + 10 * 8)]
	ldp	x12, x13, [sp, #(TF_X + 12 * 8)]
	ldp	x14, x15, [sp, #(TF_X + 14 * 8)]
	ldp	x16, x17, [sp, #(TF_X + 16 * 8)]
.if \el == 0
	/*
	 * We only restore the callee saved registers when returning to
	 * userland as they may have been updated by a system call or signal.
	 */
	ldp	x18, x19, [sp, #(TF_X + 18 * 8)]
	ldp	x20, x21, [sp, #(TF_X + 20 * 8)]
	ldp	x22, x23, [sp, #(TF_X + 22 * 8)]
	ldp	x24, x25, [sp, #(TF_X + 24 * 8)]
	ldp	x26, x27, [sp, #(TF_X + 26 * 8)]
	ldp	x28, x29, [sp, #(TF_X + 28 * 8)]
.else
	/* Within the kernel only the frame pointer needs restoring. */
	ldr	x29, [sp, #(TF_X + 29 * 8)]
.endif
.if \el == 0
	/* Pop the trapframe and the frame record. */
	add	sp, sp, #(TF_SIZE + 16)
.else
	/* Return to the interrupted kernel stack (saved in TF_SP, now in
	 * x18) and reload the per-CPU pointer into x18. */
	mov	sp, x18
	mrs	x18, tpidr_el1
.endif
.endm

/*
 * do_ast -- handle pending asynchronous system traps before returning
 * to userland.  Loops until neither TDF_ASTPENDING nor TDF_NEEDRESCHED
 * is set on curthread; exits with interrupts disabled (the final check
 * is made under daifset).  Clobbers x0-x3 and x19 plus whatever ast()
 * clobbers.
 */
.macro do_ast
	/* Save the interrupt state so it can be restored around ast(). */
	mrs	x19, daif
1:
	/* Disable interrupts while the thread flags are checked. */
	msr	daifset, #2

	/* Read the current thread flags */
	ldr	x1, [x18, #PC_CURTHREAD]	/* Load curthread */
	ldr	x2, [x1, #TD_FLAGS]

	/*
	 * Check if we have either bits set.  The mask is built with a
	 * shift, presumably because the combined constant does not fit a
	 * single mov immediate.
	 */
	mov	x3, #((TDF_ASTPENDING|TDF_NEEDRESCHED) >> 8)
	lsl	x3, x3, #8
	and	x2, x2, x3
	cbz	x2, 2f

	/* Restore interrupts */
	msr	daif, x19

	/* handle the ast */
	mov	x0, sp
	bl	_C_LABEL(ast)

	/* Re-check for new ast scheduled */
	b	1b
2:
.endm

/* Synchronous exception taken from EL1 (the kernel). */
ENTRY(handle_el1h_sync)
	save_registers 1
	mov	x0, sp			/* trapframe is the only argument */
	bl	do_el1h_sync
	restore_registers 1
	eret
END(handle_el1h_sync)

/* IRQ taken from EL1 (the kernel). */
ENTRY(handle_el1h_irq)
	save_registers 1
	mov	x0, sp			/* trapframe is the only argument */
	bl	intr_irq_handler
	restore_registers 1
	eret
END(handle_el1h_irq)

/* SError taken from EL1 is unexpected: trap to the debugger. */
ENTRY(handle_el1h_error)
	brk	0xf13
END(handle_el1h_error)

/* Synchronous exception (e.g. syscall) taken from 64-bit EL0. */
ENTRY(handle_el0_sync)
	save_registers 0
	mov	x0, sp			/* trapframe is the only argument */
	bl	do_el0_sync
	do_ast
	restore_registers 0
	eret
END(handle_el0_sync)

/* IRQ taken from 64-bit EL0. */
ENTRY(handle_el0_irq)
	save_registers 0
	mov	x0, sp			/* trapframe is the only argument */
	bl	intr_irq_handler
	do_ast
	restore_registers 0
	eret
END(handle_el0_irq)

/*
 * SError taken from 64-bit EL0.  do_el0_error is not expected to
 * return; trap to the debugger and spin if it somehow does.
 */
ENTRY(handle_el0_error)
	save_registers 0
	mov	x0, sp			/* trapframe is the only argument */
	bl	do_el0_error
	brk	0xf23
1:	b	1b
END(handle_el0_error)

/*
 * An unhandled vector slot: trap to the debugger and spin if the brk
 * returns.  Each vector slot is 128 (1 << 7) bytes, hence .align 7.
 */
.macro vempty
	.align 7
	brk	0xfff
1:	b	1b
.endm

/* A handled vector slot: branch out to the named handler. */
.macro vector name
	.align 7
	b	handle_\name
.endm

/*
 * The exception vector table: four groups of four 128-byte slots
 * (current EL with SP0, current EL with SPx, lower EL AArch64, lower
 * EL AArch32).  The base must be 2 KiB (1 << 11) aligned to be written
 * to VBAR_EL1.  Only the EL1h and 64-bit EL0 groups are handled.
 */
	.align 11
	.globl exception_vectors
exception_vectors:
	vempty			/* Synchronous EL1t */
	vempty			/* IRQ EL1t */
	vempty			/* FIQ EL1t */
	vempty			/* Error EL1t */

	vector el1h_sync	/* Synchronous EL1h */
	vector el1h_irq		/* IRQ EL1h */
	vempty			/* FIQ EL1h */
	vector el1h_error	/* Error EL1h */

	vector el0_sync		/* Synchronous 64-bit EL0 */
	vector el0_irq		/* IRQ 64-bit EL0 */
	vempty			/* FIQ 64-bit EL0 */
	vector el0_error	/* Error 64-bit EL0 */

	vempty			/* Synchronous 32-bit EL0 */
	vempty			/* IRQ 32-bit EL0 */
	vempty			/* FIQ 32-bit EL0 */
	vempty			/* Error 32-bit EL0 */
