1/* $NetBSD: atomic_nand_32.S,v 1.5 2021/07/29 10:29:05 skrll Exp $ */ 2 3/*- 4 * Copyright (c) 2014 The NetBSD Foundation, Inc. 5 * All rights reserved. 6 * 7 * This code is derived from software contributed to The NetBSD Foundation 8 * by Matt Thomas of 3am Software Foundry. 9 * 10 * Redistribution and use in source and binary forms, with or without 11 * modification, are permitted provided that the following conditions 12 * are met: 13 * 1. Redistributions of source code must retain the above copyright 14 * notice, this list of conditions and the following disclaimer. 15 * 2. Redistributions in binary form must reproduce the above copyright 16 * notice, this list of conditions and the following disclaimer in the 17 * documentation and/or other materials provided with the distribution. 18 * 19 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS 20 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS 23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 29 * POSSIBILITY OF SUCH DAMAGE. 
 */

#include "atomic_op_asm.h"

/*
 * uint32_t _atomic_nand_32(volatile uint32_t *ptr, uint32_t value)
 *
 *	{ tmp = *ptr; *ptr = ~(tmp & value); return tmp; }	// nand
 *
 * In:	x0 = ptr, w1 = value
 * Out:	w0 = old value of *ptr
 * Uses:	w2 = store-exclusive status, w3 = new value, x4 = copy of ptr
 * Note:	no dmb here — this entry provides the relaxed (no-barrier)
 *	variant; the __sync_* entry below adds the barriers.
 */
ENTRY_NP(_atomic_nand_32)
	mov	x4, x0			/* free x0/w0 for the return value */
1:	ldxr	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w3, w0, w1		/* w3 = (*ptr & value) */
	mvn	w3, w3			/* w3 = ~(*ptr & value) */
	stxr	w2, w3, [x4]		/* try to store; w2 = 0 on success */
	cbnz	w2, 2f			/* reservation lost? retry out of line */
	ret				/* success: return old value in w0 */
2:	b	1b			/* retry path kept off the fall-through */
END(_atomic_nand_32)

ATOMIC_OP_ALIAS(atomic_nand_32,_atomic_nand_32)
ATOMIC_OP_ALIAS(atomic_nand_uint,_atomic_nand_32)
STRONG_ALIAS(_atomic_nand_uint,_atomic_nand_32)

/*
 * Compiler builtin __sync_fetch_and_nand_4: same operation as
 * _atomic_nand_32 above, but bracketed with dmb ish on both sides,
 * giving the full-barrier semantics the __sync_* contract requires.
 */
ENTRY_NP(__sync_fetch_and_nand_4)
	mov	x4, x0			/* free x0/w0 for the return value */
	dmb	ish			/* full barrier before the atomic op */
1:	ldxr	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w3, w0, w1		/* w3 = (*ptr & value) */
	mvn	w3, w3			/* w3 = ~(*ptr & value) */
	stxr	w2, w3, [x4]		/* try to store; w2 = 0 on success */
	cbnz	w2, 2f			/* reservation lost? retry out of line */
	dmb	ish			/* full barrier after the atomic op */
	ret				/* success: return old value in w0 */
2:	b	1b			/* retry path kept off the fall-through */
END(__sync_fetch_and_nand_4)


/*
 * uint32_t _atomic_nand_32_nv(volatile uint32_t *ptr, uint32_t value)
 *
 *	{ tmp = ~(*ptr & value); *ptr = tmp; return tmp; }	// nand
 *
 * Same operation as _atomic_nand_32 but returns the NEW value, so the
 * nand result is computed in place in w0 instead of scratch w3.
 * In:	x0 = ptr, w1 = value
 * Out:	w0 = new value of *ptr
 * Uses:	w2 = store-exclusive status, x4 = copy of ptr
 */
ENTRY_NP(_atomic_nand_32_nv)
	mov	x4, x0			/* free x0/w0 for the return value */
1:	ldxr	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w0, w0, w1		/* w0 = (*ptr & value) */
	mvn	w0, w0			/* w0 = ~(*ptr & value) = return value */
	stxr	w2, w0, [x4]		/* try to store; w2 = 0 on success */
	cbnz	w2, 2f			/* reservation lost? retry out of line */
	ret				/* success: return new value in w0 */
2:	b	1b			/* retry path kept off the fall-through */
END(_atomic_nand_32_nv)

ATOMIC_OP_ALIAS(atomic_nand_32_nv,_atomic_nand_32_nv)
ATOMIC_OP_ALIAS(atomic_nand_uint_nv,_atomic_nand_32_nv)
STRONG_ALIAS(_atomic_nand_uint_nv,_atomic_nand_32_nv)

/*
 * Compiler builtin __sync_nand_and_fetch_4: same operation as
 * _atomic_nand_32_nv above, but bracketed with dmb ish on both sides,
 * giving the full-barrier semantics the __sync_* contract requires.
 */
ENTRY_NP(__sync_nand_and_fetch_4)
	mov	x4, x0			/* free x0/w0 for the return value */
	dmb	ish			/* full barrier before the atomic op */
1:	ldxr	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w0, w0, w1		/* w0 = (*ptr & value) */
	mvn	w0, w0			/* w0 = ~(*ptr & value) = return value */
	stxr	w2, w0, [x4]		/* try to store; w2 = 0 on success */
	cbnz	w2, 2f			/* reservation lost? retry out of line */
	dmb	ish			/* full barrier after the atomic op */
	ret				/* success: return new value in w0 */
2:	b	1b			/* retry path kept off the fall-through */
END(__sync_nand_and_fetch_4)