/* $NetBSD: atomic_add_8.S,v 1.5 2021/07/28 07:32:20 skrll Exp $ */

/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

/* LDREXB/STREXB require ARMv6 or later; older cores have no byte exclusives. */
#if defined(_ARM_ARCH_6)

/*
 * _atomic_add_8 / _atomic_sub_8:
 *	Atomically add (or subtract) r1 to/from the byte at [r0] using an
 *	LDREXB/STREXB retry loop.
 *
 * In:	r0 = address of the byte, r1 = addend
 * Out:	r0 = OLD value of the byte (fetch-and-add semantics, which is why
 *	this also backs __atomic_fetch_add_1 below)
 * Clobbers: r1 (sub entry only), r2, r3, ip, flags
 * No memory barrier is issued here; callers needing ordering add their own
 * (see the __sync_* wrappers below).
 */
ENTRY_NP(_atomic_sub_8)
	negs	r1, r1			/* r1 = -r1, so sub becomes add */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_8)
	mov	ip, r0			/* free up r0 for the return value */
1:	ldrexb	r0, [ip]		/* load old value (exclusive) */
	adds	r3, r0, r1		/* calculate new value */
	strexb	r2, r3, [ip]		/* try to store; r2 = 0 on success */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, exclusive lost -- try again */
	RET				/* return old value */
END(_atomic_add_8)
END(_atomic_sub_8)

ATOMIC_OP_ALIAS(atomic_add_8,_atomic_add_8)
ATOMIC_OP_ALIAS(atomic_add_char,_atomic_add_8)
ATOMIC_OP_ALIAS(atomic_add_uchar,_atomic_add_8)
CRT_ALIAS(__atomic_fetch_add_1,_atomic_add_8)
STRONG_ALIAS(_atomic_add_char,_atomic_add_8)
STRONG_ALIAS(_atomic_add_uchar,_atomic_add_8)

/*
 * __sync_fetch_and_add_1: fully-ordered fetch-and-add, built by
 * bracketing the relaxed primitive with DMB barriers.
 * r4 is otherwise unused here; it is pushed only to keep the stack
 * 8-byte aligned across the call, as AAPCS requires.
 */
ENTRY_NP(__sync_fetch_and_add_1)
	push	{r4, lr}
	DMB				/* order prior accesses before the RMW */
	bl	_atomic_add_8
	DMB				/* order the RMW before later accesses */
	pop	{r4, pc}
END(__sync_fetch_and_add_1)

/* __sync_fetch_and_sub_1: as above, for subtraction. */
ENTRY_NP(__sync_fetch_and_sub_1)
	push	{r4, lr}
	DMB
	bl	_atomic_sub_8
	DMB
	pop	{r4, pc}
END(__sync_fetch_and_sub_1)

ATOMIC_OP_ALIAS(atomic_sub_8,_atomic_sub_8)
ATOMIC_OP_ALIAS(atomic_sub_char,_atomic_sub_8)
ATOMIC_OP_ALIAS(atomic_sub_uchar,_atomic_sub_8)
CRT_ALIAS(__atomic_fetch_sub_1,_atomic_sub_8)
STRONG_ALIAS(_atomic_sub_char,_atomic_sub_8)
STRONG_ALIAS(_atomic_sub_uchar,_atomic_sub_8)


/*
 * _atomic_add_8_nv / _atomic_sub_8_nv:
 *	Same LDREXB/STREXB loop, but the sum itself is kept in r0 so the
 *	NEW value is returned instead of the old one.
 *
 * In:	r0 = address of the byte, r1 = addend
 * Out:	r0 = new value of the byte
 * Clobbers: r1 (sub entry only), r2, ip, flags
 */
ENTRY_NP(_atomic_sub_8_nv)
	negs	r1, r1			/* r1 = -r1, so sub becomes add */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_8_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrexb	r0, [ip]		/* load old value (exclusive) */
	adds	r0, r0, r1		/* calculate new value (return value) */
	strexb	r2, r0, [ip]		/* try to store; r2 = 0 on success */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, exclusive lost -- try again */
	RET				/* return new value */
END(_atomic_add_8_nv)
END(_atomic_sub_8_nv)

ATOMIC_OP_ALIAS(atomic_add_8_nv,_atomic_add_8_nv)
ATOMIC_OP_ALIAS(atomic_add_char_nv,_atomic_add_8_nv)
ATOMIC_OP_ALIAS(atomic_add_uchar_nv,_atomic_add_8_nv)
STRONG_ALIAS(_atomic_add_char_nv,_atomic_add_8_nv)
STRONG_ALIAS(_atomic_add_uchar_nv,_atomic_add_8_nv)

ATOMIC_OP_ALIAS(atomic_sub_8_nv,_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_sub_char_nv,_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_sub_uchar_nv,_atomic_sub_8_nv)
STRONG_ALIAS(_atomic_sub_char_nv,_atomic_sub_8_nv)
STRONG_ALIAS(_atomic_sub_uchar_nv,_atomic_sub_8_nv)

/*
 * __sync_add_and_fetch_1 / __sync_sub_and_fetch_1: fully-ordered
 * add-and-fetch, DMB-bracketed like the fetch-and-* wrappers above
 * (r4 pushed only for AAPCS 8-byte stack alignment).
 */
ENTRY_NP(__sync_add_and_fetch_1)
	push	{r4, lr}
	DMB
	bl	_atomic_add_8_nv
	DMB
	pop	{r4, pc}
END(__sync_add_and_fetch_1)

ENTRY_NP(__sync_sub_and_fetch_1)
	push	{r4, lr}
	DMB
	bl	_atomic_sub_8_nv
	DMB
	pop	{r4, pc}
END(__sync_sub_and_fetch_1)

#endif /* _ARM_ARCH_6 */