1/* SPDX-License-Identifier: GPL-2.0-only */
2/*
3 * Copyright (C) 2013 ARM Ltd.
4 */
5#ifndef __ASM_WORD_AT_A_TIME_H
6#define __ASM_WORD_AT_A_TIME_H
7
8#include <linux/uaccess.h>
9
10#ifndef __AARCH64EB__
11
12#include <linux/bitops.h>
13#include <linux/wordpart.h>
14
struct word_at_a_time {
	const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

/*
 * Return non-zero (and set *bits to the same mask) iff @a contains at
 * least one all-zero byte.  Classic trick: subtracting 0x01 from every
 * byte borrows into bit 7 only for bytes that were zero, and the ~a
 * term discards bytes whose own top bit was already set.
 */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits,
				     const struct word_at_a_time *c)
{
	unsigned long borrows = (a - c->one_bits) & ~a;
	unsigned long mask = borrows & c->high_bits;

	*bits = mask;
	return mask;
}
28
/*
 * The mask produced by has_zero() is already in the form that
 * create_zero_mask() expects, so no extra preparation is needed here.
 */
#define prep_zero_mask(a, bits, c) (bits)
30
/*
 * Convert the has_zero() bit mask (bit 7 set in each zero byte) into a
 * byte mask covering everything below the first zero byte: all bits
 * strictly below the lowest set bit, shifted down so the result is
 * 0xff per byte preceding the first zero byte.
 */
static inline unsigned long create_zero_mask(unsigned long bits)
{
	unsigned long below_lowest = (bits - 1) & ~bits;

	return below_lowest >> 7;
}
36
/*
 * Given the mask from create_zero_mask() (0xff for each byte below the
 * first zero byte), return the byte index (0..7) of that zero byte:
 * the highest set bit position divided by 8.
 */
static inline unsigned long find_zero(unsigned long mask)
{
	unsigned long top_bit = fls64(mask);

	return top_bit >> 3;
}
41
/* create_zero_mask() already yields the final byte mask, so this is a no-op. */
#define zero_bytemask(mask) (mask)
43
44#else	/* __AARCH64EB__ */
45#include <asm-generic/word-at-a-time.h>
46#endif
47
48/*
49 * Load an unaligned word from kernel space.
50 *
51 * In the (very unlikely) case of the word being a page-crosser
52 * and the next page not being mapped, take the exception and
53 * return zeroes in the non-existing part.
54 */
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
	unsigned long ret;

	/*
	 * NOTE(review): tag-check-override is enabled around the access —
	 * presumably so an asynchronous MTE tag-check fault on the
	 * page-crossing portion of the load cannot fire; confirm against
	 * the __mte_enable_tco_async() implementation.
	 */
	__mte_enable_tco_async();

	/*
	 * Load word from unaligned pointer addr.  If the LDR at label 1
	 * faults, the extable fixup registered by
	 * _ASM_EXTABLE_LOAD_UNALIGNED_ZEROPAD supplies %0 (using %1, the
	 * faulting address register) and execution resumes at label 2,
	 * giving the zero-padded result described in the comment above.
	 */
	asm(
	"1:	ldr	%0, %2\n"
	"2:\n"
	_ASM_EXTABLE_LOAD_UNALIGNED_ZEROPAD(1b, 2b, %0, %1)
	: "=&r" (ret)	/* early-clobber: written before all inputs are consumed */
	: "r" (addr), "Q" (*(unsigned long *)addr));

	__mte_disable_tco_async();

	return ret;
}
73
74#endif /* __ASM_WORD_AT_A_TIME_H */
75