/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS	128 /* What's the largest number you can think of? */

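/*
 * Run one futex RMW operation on *uaddr as an LDXR/STLXR loop with user
 * access enabled.  Operand map: %w0 = ret, %w1 = oldval, %2 = *uaddr,
 * %w3 = tmp (the new value computed by "insn"), %w4 = loops, %w5 = oparg,
 * %w6 = -EAGAIN.  A failed STLXR decrements the loop counter and retries;
 * after FUTEX_MAX_LOOPS failed attempts we return -EAGAIN rather than keep
 * spinning in the kernel, leaving it to the caller to retry.  STLXR is a
 * store-release and the trailing DMB ISH makes the successful operation a
 * full barrier.  A fault on the user access is fixed up by the extable
 * entries, which set %w0 to -EFAULT and resume at label 3.
 */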
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)		\
do {									\
	unsigned int loops = FUTEX_MAX_LOOPS;				\
									\
	uaccess_enable_privileged();					\
	asm volatile(							\
"	prfm	pstl1strm, %2\n"					\
"1:	ldxr	%w1, %2\n"						\
	insn "\n"							\
"2:	stlxr	%w0, %w3, %2\n"						\
"	cbz	%w0, 3f\n"						\
"	sub	%w4, %w4, %w0\n"					\
"	cbnz	%w4, 1b\n"						\
"	mov	%w0, %w6\n"						\
"3:\n"									\
"	dmb	ish\n"							\
	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)				\
	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)				\
	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
	  "+r" (loops)							\
	: "r" (oparg), "Ir" (-EAGAIN)					\
	: "memory");							\
	uaccess_disable_privileged();					\
} while (0)

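/*
 * Dispatch a FUTEX_OP_* encoded operation to the LL/SC loop above.  The
 * user pointer is masked with __uaccess_mask_ptr() so that an out-of-range
 * address cannot be dereferenced, even speculatively; for FUTEX_OP_ANDN the
 * complement is applied to oparg here so the asm only needs a plain AND.
 * On success the previous value of *uaddr is returned through *oval.
 */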
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
	int oldval = 0, ret, tmp;
	u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%w3, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%w3, %w1, %w5",
				  ret, oldval, uaddr, tmp, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

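/*
 * Atomically compare-and-exchange the user word at _uaddr from oldval to
 * newval.  A comparison mismatch branches straight to label 4, skipping
 * both the store and the DMB; a failed STLXR retries up to FUTEX_MAX_LOOPS
 * times before giving up with -EAGAIN; a fault sets -EFAULT via the extable
 * entries.  Unless we fault or give up, the value read from the user word
 * is returned through *uval so the caller can tell whether the exchange
 * happened.
 */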
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	unsigned int loops = FUTEX_MAX_LOOPS;
	u32 val, tmp;
	u32 __user *uaddr;

	if (!access_ok(_uaddr, sizeof(u32)))
		return -EFAULT;

	uaddr = __uaccess_mask_ptr(_uaddr);
	uaccess_enable_privileged();
	asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"	prfm	pstl1strm, %2\n"
"1:	ldxr	%w1, %2\n"
"	sub	%w3, %w1, %w5\n"
"	cbnz	%w3, 4f\n"
"2:	stlxr	%w3, %w6, %2\n"
"	cbz	%w3, 3f\n"
"	sub	%w4, %w4, %w3\n"
"	cbnz	%w4, 1b\n"
"	mov	%w0, %w7\n"
"3:\n"
"	dmb	ish\n"
"4:\n"
	_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
	_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
	: "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
	: "memory");
	uaccess_disable_privileged();

	if (!ret)
		*uval = val;

	return ret;
}

#endif /* __ASM_FUTEX_H */