1/* $NetBSD: atomic_nand_16.S,v 1.5 2021/07/29 10:29:05 skrll Exp $ */ 2 3/*- 4 * Copyright (c) 2014 The NetBSD Foundation, Inc. 5 * All rights reserved. 6 * 7 * This code is derived from software contributed to The NetBSD Foundation 8 * by Matt Thomas of 3am Software Foundry. 9 * 10 * Redistribution and use in source and binary forms, with or without 11 * modification, are permitted provided that the following conditions 12 * are met: 13 * 1. Redistributions of source code must retain the above copyright 14 * notice, this list of conditions and the following disclaimer. 15 * 2. Redistributions in binary form must reproduce the above copyright 16 * notice, this list of conditions and the following disclaimer in the 17 * documentation and/or other materials provided with the distribution. 18 * 19 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS 20 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS 23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 29 * POSSIBILITY OF SUCH DAMAGE. 
 */

#include "atomic_op_asm.h"

/*
 * _atomic_nand_16(ptr, value): 16-bit atomic fetch-and-nand.
 *
 *	{ tmp = *ptr; *ptr = ~(tmp & value); return tmp; }	// nand
 *
 * In:  x0 = ptr, w1 = value.  Out: w0 = OLD value of *ptr.
 * Scratch: w2 (store-exclusive status), w3, x4.
 * No barrier instructions here (contrast the __sync variant below).
 */
ENTRY_NP(_atomic_nand_16)
	mov	x4, x0			/* need w0 free for return value */
1:	ldxrh	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w3, w0, w1		/* w3 = (*ptr & value) */
	mvn	w3, w3			/* w3 = ~(*ptr & value) */
	stxrh	w2, w3, [x4]		/* try to store; w2 = 0 iff it stuck */
	cbnz	w2, 2f			/* exclusive reservation lost: retry */
	ret				/* success: return old value in w0 */
2:	b	1b			/* out-of-line retry so the success
					 * path falls straight through to ret */
END(_atomic_nand_16)

/*
 * __sync_fetch_and_nand_2(ptr, value): GCC __sync fetch-and-nand on a
 * 16-bit cell.  Same update as _atomic_nand_16, bracketed by dmb ish
 * (inner-shareable) barriers before the update and after a successful
 * store.  Returns the OLD value in w0.
 */
ENTRY_NP(__sync_fetch_and_nand_2)
	mov	x4, x0			/* need w0 free for return value */
	dmb	ish			/* barrier before the update */
1:	ldxrh	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w3, w0, w1		/* w3 = (*ptr & value) */
	mvn	w3, w3			/* w3 = ~(*ptr & value) */
	stxrh	w2, w3, [x4]		/* try to store; w2 = 0 iff it stuck */
	cbnz	w2, 2f			/* exclusive reservation lost: retry */
	dmb	ish			/* barrier after the successful store */
	ret				/* return old value in w0 */
2:	b	1b			/* out-of-line retry; success path
					 * falls through */
END(__sync_fetch_and_nand_2)

ATOMIC_OP_ALIAS(atomic_nand_16,_atomic_nand_16)
ATOMIC_OP_ALIAS(atomic_nand_ushort,_atomic_nand_16)
STRONG_ALIAS(_atomic_nand_ushort,_atomic_nand_16)


/*
 * _atomic_nand_16_nv(ptr, value): as above, but returns the NEW value.
 *
 *	{ tmp = ~(*ptr & value); *ptr = tmp; return tmp; }	// nand
 *
 * The new value is computed in w0 itself, so the same register is both
 * the stored value and the return value (no re-read of memory).
 */
ENTRY_NP(_atomic_nand_16_nv)
	mov	x4, x0			/* need w0 free for return value */
1:	ldxrh	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w0, w0, w1		/* w0 = (*ptr & value) */
	mvn	w0, w0			/* w0 = ~(*ptr & value) = new value */
	stxrh	w2, w0, [x4]		/* try to store; w2 = 0 iff it stuck */
	cbnz	w2, 2f			/* exclusive reservation lost: retry */
	ret				/* success: return new value in w0 */
2:	b	1b			/* out-of-line retry; success path
					 * falls through */
END(_atomic_nand_16_nv)

/*
 * __sync_nand_and_fetch_2(ptr, value): GCC __sync nand-and-fetch on a
 * 16-bit cell — returns the NEW value in w0 — with dmb ish barriers
 * before the update and after a successful store.
 */
ENTRY_NP(__sync_nand_and_fetch_2)
	mov	x4, x0			/* need w0 free for return value */
	dmb	ish			/* barrier before the update */
1:	ldxrh	w0, [x4]		/* load-exclusive old value (*ptr) */
	and	w0, w0, w1		/* w0 = (*ptr & value) */
	mvn	w0, w0			/* w0 = ~(*ptr & value) = new value */
	stxrh	w2, w0, [x4]		/* try to store; w2 = 0 iff it stuck */
	cbnz	w2, 2f			/* exclusive reservation lost: retry */
	dmb	ish			/* barrier after the successful store */
	ret				/* return new value in w0 */
2:	b	1b			/* out-of-line retry; success path
					 * falls through */
END(__sync_nand_and_fetch_2)

ATOMIC_OP_ALIAS(atomic_nand_16_nv,_atomic_nand_16_nv)
ATOMIC_OP_ALIAS(atomic_nand_ushort_nv,_atomic_nand_16_nv)
STRONG_ALIAS(_atomic_nand_ushort_nv,_atomic_nand_16_nv)