1#ifndef __M68K_UACCESS_H
2#define __M68K_UACCESS_H
3
4/*
5 * User space memory access functions
6 */
7#include <linux/compiler.h>
8#include <linux/errno.h>
9#include <linux/types.h>
10#include <linux/sched.h>
11#include <asm/segment.h>
12
/* Legacy access-type arguments for access_ok(); unused on m68k. */
#define VERIFY_READ	0
#define VERIFY_WRITE	1

/*
 * We let the MMU do all checking: every user pointer is accepted here,
 * and a bad access faults at run time and is recovered through the
 * exception table fixups below.
 */
#define access_ok(type,addr,size) 1
18
/*
 * The exception table consists of pairs of addresses: the first is the
 * address of an instruction that is allowed to fault, and the second is
 * the address at which the program should continue.  No registers are
 * modified, so it is entirely up to the continuation code to figure out
 * what to do.
 *
 * All the routines below use bits of fixup code that are out of line
 * with the main instruction path.  This means when everything is well,
 * we don't even have to jump over them.  Further, they do not intrude
 * on our cache or tlb entries.
 */

struct exception_table_entry
{
	/* insn: address of the faultable instruction; fixup: resume address */
	unsigned long insn, fixup;
};
36
/*
 * Called for unsupported transfer sizes.  NOTE(review): these appear to
 * be deliberately left without an inline definition so that a bad size
 * in get_user()/put_user() is diagnosed at link time -- confirm against
 * the arch lib sources.
 */
extern int __put_user_bad(void);
extern int __get_user_bad(void);
39
/*
 * Store one value to user space with a moves instruction.
 *
 * res: int error variable; set to 'err' on fault, untouched on success
 *      (the caller must pre-initialize it to 0)
 * x:   value to store
 * ptr: user-space destination
 * bwl: moves size suffix (b/w/l)
 * reg: constraint letter for x ("d" for byte/word, "r" for long)
 * err: error code loaded into res on fault
 *
 * Label 1: is the faultable store; the fixup at 10: loads the error
 * code and resumes at 2:.  NOTE(review): label 2: (the instruction
 * after the store) also has an exception-table entry pointing at the
 * same fixup -- presumably to catch faults reported on the following
 * instruction; confirm against the m68k fault-handling code.
 */
#define __put_user_asm(res, x, ptr, bwl, reg, err)	\
asm volatile ("\n"					\
	"1:	moves."#bwl"	%2,%1\n"		\
	"2:\n"						\
	"	.section .fixup,\"ax\"\n"		\
	"	.even\n"				\
	"10:	moveq.l	%3,%0\n"			\
	"	jra 2b\n"				\
	"	.previous\n"				\
	"\n"						\
	"	.section __ex_table,\"a\"\n"		\
	"	.align	4\n"				\
	"	.long	1b,10b\n"			\
	"	.long	2b,10b\n"			\
	"	.previous"				\
	: "+d" (res), "=m" (*(ptr))			\
	: #reg (x), "i" (err))
57
/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 *
 * __put_user()/put_user() store a value of sizeof(*(ptr)) bytes to user
 * space and evaluate to 0 on success or -EFAULT on fault.  Sizes 1, 2
 * and 4 go through __put_user_asm(); 8-byte values are stored as two
 * longword moves (%2 and %R2 name the two halves of a 64-bit register
 * pair).  Any other size resolves to __put_user_bad().
 *
 * No access_ok() check is performed -- the MMU does all validation (see
 * access_ok above).
 */

#define __put_user(x, ptr)						\
({									\
	typeof(*(ptr)) __pu_val = (x);					\
	int __pu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof (*(ptr))) {					\
	case 1:								\
		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__put_user_asm(__pu_err, __pu_val, ptr, w, d, -EFAULT);	\
		break;							\
	case 4:								\
		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT);	\
		break;							\
	case 8:								\
 	    {								\
 		const void __user *__pu_ptr = (ptr);			\
		asm volatile ("\n"					\
			"1:	moves.l	%2,(%1)+\n"			\
			"2:	moves.l	%R2,(%1)\n"			\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	movel %3,%0\n"				\
			"	jra 3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align 4\n"				\
			"	.long 1b,10b\n"				\
			"	.long 2b,10b\n"				\
			"	.long 3b,10b\n"				\
			"	.previous"				\
			: "+d" (__pu_err), "+a" (__pu_ptr)		\
			: "r" (__pu_val), "i" (-EFAULT)			\
			: "memory");					\
		break;							\
	    }								\
	default:							\
		__pu_err = __put_user_bad();				\
		break;							\
	}								\
	__pu_err;							\
})
#define put_user(x, ptr)	__put_user(x, ptr)
109
110
/*
 * Load one value from user space with a moves instruction.
 *
 * res:  int error variable; set to 'err' on fault, untouched on success
 *       (the caller must pre-initialize it to 0)
 * x:    lvalue receiving the result; on fault it is zeroed via the
 *       sub instruction in the fixup path
 * ptr:  user-space source
 * type: unsigned type matching the access width (u8/u16/u32)
 * bwl:  moves size suffix (b/w/l)
 * reg:  constraint letter for the temporary ("d" or "r")
 * err:  error code loaded into res on fault
 *
 * The load goes through the local __gu_val temporary, and the final
 * assignment casts via (unsigned long) -- NOTE(review): presumably so
 * the result can be assigned when *(ptr) is a pointer type; confirm.
 */
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({	\
	type __gu_val;						\
	asm volatile ("\n"					\
		"1:	moves."#bwl"	%2,%1\n"		\
		"2:\n"						\
		"	.section .fixup,\"ax\"\n"		\
		"	.even\n"				\
		"10:	move.l	%3,%0\n"			\
		"	sub."#bwl"	%1,%1\n"		\
		"	jra	2b\n"				\
		"	.previous\n"				\
		"\n"						\
		"	.section __ex_table,\"a\"\n"		\
		"	.align	4\n"				\
		"	.long	1b,10b\n"			\
		"	.previous"				\
		: "+d" (res), "=&" #reg (__gu_val)		\
		: "m" (*(ptr)), "i" (err));			\
	(x) = (typeof(*(ptr)))(unsigned long)__gu_val;		\
})
131
/*
 * __get_user()/get_user(): load a value of sizeof(*(ptr)) bytes from
 * user space into x, evaluating to 0 on success or -EFAULT on fault
 * (x is zeroed on fault).  Sizes 1, 2 and 4 are supported; the 8-byte
 * case below is disabled (see the inline comment) and, like any other
 * size, falls through to the link-time error __get_user_bad().
 */
#define __get_user(x, ptr)						\
({									\
	int __gu_err = 0;						\
	__chk_user_ptr(ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);	\
		break;							\
	case 2:								\
		__get_user_asm(__gu_err, x, ptr, u16, w, d, -EFAULT);	\
		break;							\
	case 4:								\
		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);	\
		break;							\
/*	case 8:	disabled because gcc-4.1 has a broken typeof		\
 	    {								\
 		const void *__gu_ptr = (ptr);				\
 		u64 __gu_val;						\
		asm volatile ("\n"					\
			"1:	moves.l	(%2)+,%1\n"			\
			"2:	moves.l	(%2),%R1\n"			\
			"3:\n"						\
			"	.section .fixup,\"ax\"\n"		\
			"	.even\n"				\
			"10:	move.l	%3,%0\n"			\
			"	sub.l	%1,%1\n"			\
			"	sub.l	%R1,%R1\n"			\
			"	jra	3b\n"				\
			"	.previous\n"				\
			"\n"						\
			"	.section __ex_table,\"a\"\n"		\
			"	.align	4\n"				\
			"	.long	1b,10b\n"			\
			"	.long	2b,10b\n"			\
			"	.previous"				\
			: "+d" (__gu_err), "=&r" (__gu_val),		\
			  "+a" (__gu_ptr)				\
			: "i" (-EFAULT)					\
			: "memory");					\
		(x) = (typeof(*(ptr)))__gu_val;				\
		break;							\
	    }	*/							\
	default:							\
		__gu_err = __get_user_bad();				\
		break;							\
	}								\
	__gu_err;							\
})
#define get_user(x, ptr) __get_user(x, ptr)
181
/*
 * Out-of-line bulk copy fallbacks, used when the length is not a
 * compile-time constant or not one of the inlined sizes below.
 * Both return the number of bytes NOT copied (0 on success).
 */
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
184
/*
 * Copy a small constant-sized block from user space as two or three
 * moves of sizes s1, s2 and (optionally) s3; pass an empty s3 argument
 * to emit only two moves.  'tmp' is a scratch data register.
 *
 * On a fault at any of the user-space loads (labels 1:/2:/3:) the
 * fixup code clears the not-yet-written destination bytes starting at
 * the faulting step (entry points 10:/20:/30: fall through each other)
 * and sets res to the full length n; on success res is left untouched,
 * so the caller must pre-initialize it to 0.
 */
#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
	asm volatile ("\n"						\
		"1:	moves."#s1"	(%2)+,%3\n"			\
		"	move."#s1"	%3,(%1)+\n"			\
		"2:	moves."#s2"	(%2)+,%3\n"			\
		"	move."#s2"	%3,(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"3:	moves."#s3"	(%2)+,%3\n"			\
		"	move."#s3"	%3,(%1)+\n"			\
		"	.endif\n"					\
		"4:\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	1b,10f\n"				\
		"	.long	2b,20f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	3b,30f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"10:	clr."#s1"	(%1)+\n"			\
		"20:	clr."#s2"	(%1)+\n"			\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"30:	clr."#s3"	(%1)+\n"			\
		"	.endif\n"					\
		"	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
217
218static __always_inline unsigned long
219__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
220{
221	unsigned long res = 0, tmp;
222
223	switch (n) {
224	case 1:
225		__get_user_asm(res, *(u8 *)to, (u8 __user *)from, u8, b, d, 1);
226		break;
227	case 2:
228		__get_user_asm(res, *(u16 *)to, (u16 __user *)from, u16, w, d, 2);
229		break;
230	case 3:
231		__constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
232		break;
233	case 4:
234		__get_user_asm(res, *(u32 *)to, (u32 __user *)from, u32, l, r, 4);
235		break;
236	case 5:
237		__constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
238		break;
239	case 6:
240		__constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
241		break;
242	case 7:
243		__constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
244		break;
245	case 8:
246		__constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
247		break;
248	case 9:
249		__constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
250		break;
251	case 10:
252		__constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
253		break;
254	case 12:
255		__constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
256		break;
257	default:
258		/* we limit the inlined version to 3 moves */
259		return __generic_copy_from_user(to, from, n);
260	}
261
262	return res;
263}
264
/*
 * Copy a small constant-sized block to user space as two or three
 * moves of sizes s1, s2 and (optionally) s3; pass an empty s3 argument
 * to emit only two moves.  'tmp' is a scratch data register.
 *
 * The user-space stores (labels 11:/21:/31:) and the instructions
 * following them (12:/22:/32:) all share a single fixup (5:) that sets
 * res to the full length n and skips to the end; on success res is
 * left untouched, so the caller must pre-initialize it to 0.
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)	\
	asm volatile ("\n"						\
		"	move."#s1"	(%2)+,%3\n"			\
		"11:	moves."#s1"	%3,(%1)+\n"			\
		"12:	move."#s2"	(%2)+,%3\n"			\
		"21:	moves."#s2"	%3,(%1)+\n"			\
		"22:\n"							\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	move."#s3"	(%2)+,%3\n"			\
		"31:	moves."#s3"	%3,(%1)+\n"			\
		"32:\n"							\
		"	.endif\n"					\
		"4:\n"							\
		"\n"							\
		"	.section __ex_table,\"a\"\n"			\
		"	.align	4\n"					\
		"	.long	11b,5f\n"				\
		"	.long	12b,5f\n"				\
		"	.long	21b,5f\n"				\
		"	.long	22b,5f\n"				\
		"	.ifnc	\""#s3"\",\"\"\n"			\
		"	.long	31b,5f\n"				\
		"	.long	32b,5f\n"				\
		"	.endif\n"					\
		"	.previous\n"					\
		"\n"							\
		"	.section .fixup,\"ax\"\n"			\
		"	.even\n"					\
		"5:	moveq.l	#"#n",%0\n"				\
		"	jra	4b\n"					\
		"	.previous\n"					\
		: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)	\
		: : "memory")
298
299static __always_inline unsigned long
300__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
301{
302	unsigned long res = 0, tmp;
303
304	switch (n) {
305	case 1:
306		__put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
307		break;
308	case 2:
309		__put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, d, 2);
310		break;
311	case 3:
312		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
313		break;
314	case 4:
315		__put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
316		break;
317	case 5:
318		__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
319		break;
320	case 6:
321		__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
322		break;
323	case 7:
324		__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
325		break;
326	case 8:
327		__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
328		break;
329	case 9:
330		__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
331		break;
332	case 10:
333		__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
334		break;
335	case 12:
336		__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
337		break;
338	default:
339		/* limit the inlined version to 3 moves */
340		return __generic_copy_to_user(to, from, n);
341	}
342
343	return res;
344}
345
/*
 * Bulk copy entry points: constant lengths use the inlined versions
 * above, everything else goes out of line.  Both evaluate to the
 * number of bytes not copied (0 on success).
 */
#define __copy_from_user(to, from, n)		\
(__builtin_constant_p(n) ?			\
 __constant_copy_from_user(to, from, n) :	\
 __generic_copy_from_user(to, from, n))

#define __copy_to_user(to, from, n)		\
(__builtin_constant_p(n) ?			\
 __constant_copy_to_user(to, from, n) :		\
 __generic_copy_to_user(to, from, n))

/* Nothing here can sleep or fault differently, so the atomic
 * variants are the same code. */
#define __copy_to_user_inatomic		__copy_to_user
#define __copy_from_user_inatomic	__copy_from_user

/* No access_ok() checking on m68k, so the checked and unchecked
 * variants are identical. */
#define copy_from_user(to, from, n)	__copy_from_user(to, from, n)
#define copy_to_user(to, from, n)	__copy_to_user(to, from, n)

/* Out-of-line string/clear helpers; see the arch lib for definitions. */
long strncpy_from_user(char *dst, const char __user *src, long count);
long strnlen_user(const char __user *src, long n);
unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user	__clear_user

/* strlen_user() is capped at 32767 bytes (fits in a positive short). */
#define strlen_user(str) strnlen_user(str, 32767)
369
#endif /* __M68K_UACCESS_H */
371