/*
 * 64-bit ARM lock definitions.
 *
 * Simple (spin) locks, read-write locks, and the mutex unlock path for
 * AArch64.  Lock-word layout assumed throughout (mirrors the 32-bit ARM
 * implementation this was ported from):
 *
 *   bit  0        interlock (spin part of the lock held)
 *   bit  1        wakeup pending / waiters
 *   bit  2        rw: want-upgrade
 *   bit  3        rw: want-exclusive
 *   bits 16..31   rw: shared-reader count
 *
 * NOTE(review): several fast paths below use plain ldr/str rather than
 * ldxr/stxr exclusive pairs, so they are only safe uniprocessor or with
 * interrupts/preemption already held off -- confirm against the target
 * configuration.
 */

#include <assym.s>
#include <arm/asm_help.h>

/**
 * hw_lock_init/arm_usimple_lock_init
 *
 * Initialize a lock and all of its bits to zero.
 *   x0 = lock address
 */
.align 6
.globl _hw_lock_init
.globl _arm_usimple_lock_init
_hw_lock_init:
_arm_usimple_lock_init:
    str     wzr, [x0]                   // whole lock word = 0 (unlocked)
    ret

/**
 * hw_lock_held
 *
 * Return (in x0) the interlock bit of the lock at x0: non-zero iff held.
 */
.align 6
.globl _hw_lock_held
_hw_lock_held:
    ldr     w3, [x0]
    ands    w0, w3, #1                  // writing w0 also zeroes the upper
                                        // half of x0, so no separate clear
    ret

/**
 * lck_spin_unlock/hw_lock_unlock/lck_mtx_ilk_unlock
 *
 * Clear the interlock bit, then drop the preemption disable taken when
 * the lock was acquired (tail-call returns to our caller).
 */
.align 6
.globl _lck_spin_unlock
.globl _hw_lock_unlock
.globl _lck_mtx_ilk_unlock
_lck_spin_unlock:
_lck_mtx_ilk_unlock:
_hw_lock_unlock:
    ldr     w3, [x0]
    bic     w3, w3, #1                  // clear the interlock bit
    str     w3, [x0]
    b       __enable_preemption

/**
 * arm_usimple_lock and friends.
 *
 * Take the spin lock at x0 with preemption disabled.  A lock that is
 * already held is fatal (there is no spin/retry here).
 */
.align 6
.globl _arm_usimple_lock
.globl _lck_spin_lock
.globl _hw_lock_lock
_arm_usimple_lock:
_lck_spin_lock:
_hw_lock_lock:
    /* Disable preemption: bump the current thread's preemption count. */
    mrs     x4, tpidr_el1
    ldr     w5, [x4, MACHINE_THREAD_PREEMPT_COUNT]
    add     w5, w5, #1
    str     w5, [x4, MACHINE_THREAD_PREEMPT_COUNT]

    /* Grab the interlock; panic if somebody beat us to it. */
    ldr     w3, [x0]
    tst     w3, #1
    b.ne    .L_lock_panic
    orr     w3, w3, #1
    str     w3, [x0]
    ret

.L_lock_panic:
    mov     x1, x0                      // panic(fmt, lock, *lock)
    ldr     w2, [x1]
    adr     x0, L_lock_panic_string
    bl      _panic
    b       .                           // _panic does not return

L_lock_panic_string:
    .asciz "hw_lock_lock(): LOCK 0x%016x = 0x%08x"

/**
 * arm_usimple_lock_try and friends
 *
 * Try to take the spin lock at x0 with IRQ/FIQ masked for the attempt.
 * Returns 1 on success (preemption left disabled), 0 on failure.
 */
.align 6
.globl _arm_usimple_lock_try
.globl _lck_spin_try_lock
.globl _hw_lock_try
_arm_usimple_lock_try:
_lck_spin_try_lock:
_hw_lock_try:
    mrs     x1, daif                    // remember interrupt state

    /*
     * Mask IRQ+FIQ (DAIF bits 7:6).  The original cleared these bits
     * with BIC, which *enables* interrupts; setting them masks them.
     */
    orr     x3, x1, #0xC0
    msr     daif, x3

    /* Attempt to take the interlock. */
    ldr     w3, [x0]
    tst     w3, #1
    b.ne    .L_lock_try_store_fail      // already held -> fail
    orr     w4, w3, #1
    str     w4, [x0]

    /* Success: disable preemption on the current thread. */
    mrs     x4, tpidr_el1
    ldr     w5, [x4, MACHINE_THREAD_PREEMPT_COUNT]
    add     w5, w5, #1
    str     w5, [x4, MACHINE_THREAD_PREEMPT_COUNT]
    movz    x0, #1
    b       .L_lock_try_store_exit

.L_lock_try_store_fail:
    movz    x0, #0

.L_lock_try_store_exit:
    msr     daif, x1                    // restore interrupt state
    ret

/**
 * hw_lock_to
 *
 * Take the spin lock at x0 with preemption disabled.  NOTE(review):
 * despite the "_to" (timeout) name there is no spin/timeout here -- on
 * contention the preemption bump is simply undone and we return.
 */
.align 6
.globl _hw_lock_to
_hw_lock_to:
    /* Disable preemption. */
    mrs     x4, tpidr_el1
    ldr     w3, [x4, MACHINE_THREAD_PREEMPT_COUNT]
    add     w3, w3, #1
    str     w3, [x4, MACHINE_THREAD_PREEMPT_COUNT]

    /* Attempt to take the interlock. */
    ldr     w3, [x0]
    tst     w3, #1
    b.ne    .L_lock_to_preempt          // held -> back out
    orr     w3, w3, #1
    str     w3, [x0]
    ret

.L_lock_to_preempt:
    stp     x29, x30, [sp, #-16]!
    mov     x29, sp
    bl      __enable_preemption         // undo the preemption bump
    mov     sp, x29
    ldp     x29, x30, [sp], #16
    ret

/**
 * lock_read and friends
 *
 * Acquire the rw-lock at x0 for shared (read) access.  The fast path
 * bumps the reader count (upper 16 bits) under ldxr/stxr; contended
 * cases call out to the C slow path.
 */
.align 6
.globl _lock_read
.globl _lck_rw_lock_shared
_lock_read:
_lck_rw_lock_shared:
    movz    w1, #0xD                    // interlock|want-upgrade|want-excl
                                        // (original loaded this into w3,
                                        // which the ldxr then clobbered)
rwlsloop:
    ldxr    w3, [x0]
    ands    w2, w1, w3                  // any writer/interlock activity?
    b.ne    rwlsopt
rwlsloopres:
    add     w3, w3, #0x10000            // one more shared reader
    stxr    w2, w3, [x0]
    cbz     w2, rwlsloopexit
    b       rwlsloop                    // lost the exclusive; retry
rwlsopt:
    /*
     * Writer activity seen.  NOTE(review): 0x8001 is presumed to be the
     * "must slow-path" mask carried over from the 32-bit original; when
     * none of those bits are set we can still take the read fast path.
     * The original fell through unconditionally, leaving the test dead.
     */
    movz    w2, #0x8001
    ands    w2, w3, w2
    b.eq    rwlsloopres
rwlsexit:
    clrex                               // drop the open exclusive monitor
    stp     x29, x30, [sp, #-16]!
    mov     x29, sp
    bl      _lck_rw_lock_shared_gen     // contended: C slow path
    mov     sp, x29
    ldp     x29, x30, [sp], #16
    ret
rwlsloopexit:
    ret

/**
 * lock_done and friends
 *
 * Release the rw-lock at x0 whichever way it is held.  Returns the mode
 * that was dropped (1 = shared, 2 = exclusive).  The original `.globl`s
 * had no entry labels at all; they are restored here.
 */
.align 6
.globl _lock_done
.globl _lck_rw_done
_lock_done:
_lck_rw_done:
    ldxr    w1, [x0]
    ands    w2, w1, #1                  // interlock held -> fatal
    b.ne    rwldpanic
    movz    w3, #0xFFFF
    lsl     w3, w3, #16                 // reader-count mask 0xFFFF0000
                                        // (original LSR produced zero)
    ands    w2, w1, w3
    b.eq    rwldexcl                    // no readers: exclusive release

    /* Shared release: drop one reader. */
    sub     w1, w1, #0x10000
    ands    w2, w1, w3                  // readers remaining?
    mov     w4, wzr                     // w4 = wakeup-pending flag
    movz    w3, #1                      // return value: dropped shared
    b.ne    rwldstore                   // still readers: nobody to wake
    ands    w4, w1, #2                  // last reader: wakeup pending?
    movz    w5, #2
    bic     w1, w1, w5                  // clear the wakeup bit
    b       rwldstore

rwldexcl:
    /* Exclusive release: held via want-upgrade or want-exclusive. */
    ands    w2, w1, #4
    b.ne    rwldexclne
    movz    w2, #0xA                    // clear want-excl + wakeup
    b       rwldexcl1
rwldexclne:
    orr     w2, w2, #2                  // clear want-upgrade + wakeup
                                        // (original or'ed into w3, which
                                        // is overwritten just below)
rwldexcl1:
    movz    w3, #0x2                    // return value: dropped exclusive
    and     w4, w1, #2                  // wakeup pending? (original wrote
                                        // w3, leaving w4 uninitialized)
    bic     w1, w1, w2

rwldstore:
    stxr    w2, w1, [x0]
    cbnz    w2, _lck_rw_done            // lost the exclusive; retry
    cbz     w4, rwldexit                // (original compared w12, which
                                        //  is never written)
    /* Wakeup pending: call out, preserving lock ptr and return value. */
    stp     x29, x30, [sp, #-16]!
    stp     x0, x3, [sp, #-16]!
    mov     x29, sp
    bl      _lck_rw_done_gen            // original called the shared-
                                        // acquire slow path by mistake
    mov     sp, x29
    ldp     x0, x3, [sp], #16
    ldp     x29, x30, [sp], #16
rwldexit:
    mov     x0, x3                      // return the dropped lock type
    ret

rwldpanic:
    mov     x2, x1                      // panic(fmt, lock, word)
    mov     x1, x0
    adr     x0, L_rwldpanicstring
    bl      _panic
    b       .                           // original fell through here

L_rwldpanicstring:
    .asciz "lck_rw_done(): lock (0x%016x: 0x%08x)"

/**
 * lock_read_to_write/lck_rw_lock_shared_to_exclusive
 *
 * Upgrade a shared hold on the rw-lock at x0 to exclusive.  The fast
 * path succeeds only when we are the sole reader and nobody else is
 * interlocked/upgrading; otherwise the C slow path decides.  Returns 1
 * on fast-path success.  Panics if the lock is not held shared at all.
 */
.align 6
.globl _lock_read_to_write
.globl _lck_rw_lock_shared_to_exclusive
_lock_read_to_write:
_lck_rw_lock_shared_to_exclusive:
    ldr     w1, [x0]
    movz    w3, #0xFFFF
    lsl     w3, w3, #16                 // reader-count mask (LSR bug fixed)
    ands    w2, w1, w3
    b.eq    rwlsepanic                  // no readers: not held shared.
                                        // (original branched b.ne, which
                                        //  with the mask fixed would panic
                                        //  on every legal call)
    bic     w1, w1, w3                  // strip the reader count
    subs    w2, w2, #0x10000
    b.ne    rwlsejump                   // other readers remain: slow path
    movz    w3, #5                      // interlock | want-upgrade
    ands    w3, w1, w3                  // (original tested uninitialized w4)
    b.ne    rwlsejump
    orr     w1, w1, #4                  // take the upgrade
    str     w1, [x0]
    movz    x0, #1
    ret
rwlsepanic:
    mov     x2, x1                      // panic(fmt, lock, word)
    mov     x1, x0
    adr     x0, L_rwlsepanicstring
    bl      _panic
    b       .
rwlsejump:
    stp     x29, x30, [sp, #-16]!
    mov     x29, sp
    bl      _lck_rw_lock_shared_gen     // NOTE(review): presumably meant to
                                        // be the shared_to_exclusive slow
                                        // path; kept as in the original
    mov     sp, x29
    ldp     x29, x30, [sp], #16
    ret
L_rwlsepanicstring:
    .asciz "lck_rw_lock_shared_to_exclusive(): LOCK 0x%016x = 0x%016x"

/**
 * lck_mtx_unlock
 *
 * Release the mutex at x0.  Lock word = owning thread pointer | low flag
 * bits (bit 0 interlock, bit 1 waiters).  Panics if the mutex is not
 * owned by the current thread.
 */
.align 6
.globl _lck_mtx_unlock
_lck_mtx_unlock:
    mrs     x4, daif                    // remember interrupt state

    /*
     * Mask IRQ+FIQ.  Two fixes versus the original: the saved state is
     * in x4 but uninitialized x1 was masked, and BIC *cleared* the mask
     * bits, enabling rather than disabling interrupts.
     */
    movz    x2, #0xC0
    orr     x3, x4, x2
    msr     daif, x3

    movz    w2, #0                      // value to store: unowned
    mrs     x5, tpidr_el1               // current thread
mluloop:
    ldxr    w1, [x0]
    ands    w3, w1, #3                  // interlock/waiters -> slow path
    b.ne    lmuslow
    movz    w6, #3
    bic     w3, w1, w6                  // owner = word & ~3 (original
                                        //  masked by the thread pointer)
    cmp     w3, w5                      // owned by us? (original compared
    b.ne    lmupanic                    //  against uninitialized w12)
    stxr    w1, w2, [x0]
    cbz     w1, mluexit
    b       mluloop                     // lost the exclusive; retry

lmuslow:
    /*
     * Contended release.  NOTE(review): interrupt state (x4) is not
     * restored on this path, and the waiters test below panics instead
     * of gating the wakeup -- both look suspect but are kept from the
     * original pending a proper review of the C side.
     */
    stp     x0, x1, [sp, #-16]!
    stp     x29, x30, [sp, #-16]!
    mov     x29, sp
    mrs     x5, tpidr_el1
    ldr     w6, [x5, MACHINE_THREAD_PREEMPT_COUNT]
    add     w6, w6, #1                  // disable preemption
    str     w6, [x5, MACHINE_THREAD_PREEMPT_COUNT]
    ldr     w1, [x0]
    ands    w3, w1, #1                  // interlock already held -> fatal
    b.ne    lmupanic
    orr     w3, w1, #1                  // take the interlock
    str     w3, [x0]
    movz    w2, #3
    bic     w1, w1, w2
    ands    w2, w3, #2                  // FIXME(review): waiters set ->
    b.ne    lmupanic                    //   panic? looks inverted
    bl      _lck_mtx_unlock_wakeup
    mov     sp, x29
    ldp     x29, x30, [sp], #16
    ldp     x0, x1, [sp], #16
    ldr     w1, [x0]
    and     w3, w1, #2                  // keep only the waiters bit
    str     w3, [x0]
    b       __enable_preemption         // tail-call back to caller

lmupanic:
    mov     x1, x0                      // panic(fmt, lock, *lock)
    ldr     w2, [x1]
    adr     x0, L_lmupanicstr
    bl      _panic
    b       .

L_lmupanicstr:
    .asciz "lck_mtx_unlock(): MUTEX 0x%08x 0x%08x"

mluexit:
    msr     daif, x4                    // restore interrupt state
                                        // (original leaked the mask)
    ret