/* SPDX-License-Identifier: GPL-2.0 */

/*
 * This file provides wrappers with sanitizer instrumentation for atomic bit
 * operations.
 *
 * To use this functionality, an arch's bitops.h file needs to define each of
 * the below bit operations with an arch_ prefix (e.g. arch_set_bit(),
 * arch___set_bit(), etc.).
 */
#ifndef _ASM_GENERIC_BITOPS_INSTRUMENTED_ATOMIC_H
#define _ASM_GENERIC_BITOPS_INSTRUMENTED_ATOMIC_H

#include <linux/instrumented.h>
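
/*
 * Illustrative sketch (not part of the upstream header): an arch's bitops.h
 * typically defines the arch_-prefixed primitives first and only then pulls
 * in this file, e.g.:
 *
 *	static __always_inline void arch_set_bit(long nr, volatile unsigned long *addr)
 *	{
 *		...atomic OR of BIT_MASK(nr) into addr[BIT_WORD(nr)]...
 *	}
 *
 *	#include <asm-generic/bitops/instrumented-atomic.h>
 *
 * Each wrapper below reports the access to KASAN/KCSAN via the
 * instrument_atomic_*() helpers from <linux/instrumented.h> and then defers
 * to the corresponding arch_*() implementation.
 */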

/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This is a relaxed atomic operation (no implied memory barriers).
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __always_inline void set_bit(long nr, volatile unsigned long *addr)
{
	instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
	arch_set_bit(nr, addr);
}
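
/*
 * Example (illustrative only; the 'irq_flags' word and bit number are made
 * up for this sketch):
 *
 *	static unsigned long irq_flags;
 *
 *	set_bit(3, &irq_flags);
 *
 * The read-modify-write of irq_flags is atomic but relaxed; a caller that
 * needs ordering against surrounding accesses must add it explicitly, e.g.
 * with smp_mb__after_atomic().
 */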

/**
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * This is a relaxed atomic operation (no implied memory barriers).
 */
static __always_inline void clear_bit(long nr, volatile unsigned long *addr)
{
	instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
	arch_clear_bit(nr, addr);
}
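
/*
 * Example (illustrative only; 'irq_flags' as in the sketch above):
 *
 *	clear_bit(3, &irq_flags);
 *
 * Like set_bit(), this is relaxed. When clearing a bit must also publish
 * prior stores (an unlock-style pattern), clear_bit_unlock() is normally
 * used instead, as it provides release ordering.
 */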

/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * This is a relaxed atomic operation (no implied memory barriers).
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static __always_inline void change_bit(long nr, volatile unsigned long *addr)
{
	instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
	arch_change_bit(nr, addr);
}
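
/*
 * Example (illustrative only; 'parity_word' is made up):
 *
 *	static unsigned long parity_word;
 *
 *	change_bit(0, &parity_word);
 *
 * Each call atomically flips bit 0, so concurrent callers never lose a
 * toggle, but no ordering with other memory accesses is implied.
 */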

/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This is an atomic fully-ordered operation (implied full memory barrier).
 */
static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
{
	kcsan_mb();
	instrument_atomic_read_write(addr + BIT_WORD(nr), sizeof(long));
	return arch_test_and_set_bit(nr, addr);
}
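
/*
 * Example (illustrative only; 'state', WORK_PENDING and queue_work_item()
 * are assumptions for this sketch):
 *
 *	if (!test_and_set_bit(WORK_PENDING, &state))
 *		queue_work_item();
 *
 * Only the caller that observes the bit as previously clear queues the
 * work, and the implied full barrier means none of that caller's earlier
 * or later accesses can be reordered across the bit operation.
 */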

/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This is an atomic fully-ordered operation (implied full memory barrier).
 */
static __always_inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
{
	kcsan_mb();
	instrument_atomic_read_write(addr + BIT_WORD(nr), sizeof(long));
	return arch_test_and_clear_bit(nr, addr);
}
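
/*
 * Example (illustrative only; 'pending' and handle_one_event() are
 * assumptions):
 *
 *	if (test_and_clear_bit(0, &pending))
 *		handle_one_event();
 *
 * If the bit was set once, exactly one of any number of concurrent callers
 * sees it as set and consumes the event; the full barrier orders that
 * consumption against the handler's subsequent accesses.
 */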

/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This is an atomic fully-ordered operation (implied full memory barrier).
 */
static __always_inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
{
	kcsan_mb();
	instrument_atomic_read_write(addr + BIT_WORD(nr), sizeof(long));
	return arch_test_and_change_bit(nr, addr);
}
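
/*
 * Example (illustrative only; 'toggle_word' is made up):
 *
 *	static unsigned long toggle_word;
 *
 *	bool was_set = test_and_change_bit(0, &toggle_word);
 *
 * The bit is flipped atomically and its previous value returned, with the
 * same full ordering as the other test_and_*() variants above.
 */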

#endif /* _ASM_GENERIC_BITOPS_INSTRUMENTED_ATOMIC_H */