#define __SYSCALL_LL_E(x) \
((union { long long ll; long l[2]; }){ .ll = x }).l[0], \
((union { long long ll; long l[2]; }){ .ll = x }).l[1]
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))
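/* __SYSCALL_LL_E expands a 64-bit argument into its two 32-bit halves in
 * the order the o32 ABI expects (endian-dependent, via the union overlay);
 * __SYSCALL_LL_O additionally prepends a zero pad word so the 64-bit value
 * starts on an even register pair. */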

__attribute__((visibility("hidden")))
long (__syscall)(long, ...);

#define SYSCALL_RLIM_INFINITY (-1UL/2)

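/* The kernel fills in 32-bit device numbers, while struct stat declares
 * st_dev and st_rdev as 64-bit fields overlapping the kernel value and its
 * padding. On big-endian MIPS the value therefore lands in the upper half
 * and must be shifted down; on little-endian no fixup is needed. */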
#if _MIPSEL || __MIPSEL || __MIPSEL__
#define __stat_fix(st) ((st),(void)0)
#else
#include <sys/stat.h>
static inline void __stat_fix(long p)
{
	struct stat *st = (struct stat *)p;
	st->st_dev >>= 32;
	st->st_rdev >>= 32;
}
#endif

#ifndef __clang__

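/* MIPS o32 syscall convention: the syscall number goes in $2 (v0), the
 * first four arguments in $4-$7 (a0-a3), the result comes back in $2, and
 * $7 (a3) acts as an error flag: when it is nonzero, $2 holds a positive
 * errno which is negated before returning. */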
static inline long __syscall0(long n)
{
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}

static inline long __syscall1(long n, long a)
{
	register long r4 __asm__("$4") = a;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	return r7 ? -r2 : r2;
}

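/* For the stat-family syscalls the stat buffer is the second argument, so
 * the endianness fixup is applied to it on success. */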
static inline long __syscall2(long n, long a, long b)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7");
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return ret;
}

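/* SYS_fstatat passes its stat buffer as the third argument, so it gets the
 * fixup applied to c rather than b. */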
static inline long __syscall4(long n, long a, long b, long c, long d)
{
	register long r4 __asm__("$4") = a;
	register long r5 __asm__("$5") = b;
	register long r6 __asm__("$6") = c;
	register long r7 __asm__("$7") = d;
	register long r2 __asm__("$2");
	__asm__ __volatile__ (
		"addu $2,$0,%2 ; syscall"
		: "=&r"(r2), "=r"(r7) : "ir"(n), "0"(r2), "1"(r7),
		  "r"(r4), "r"(r5), "r"(r6)
		: "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
		  "$14", "$15", "$24", "$25", "hi", "lo", "memory");
	if (r7) return -r2;
	long ret = r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return ret;
}

#else

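/* Fallback used when building with clang: go through the out-of-line
 * __syscall asm helper. Return values above -4096UL (i.e. -4095..-1) are
 * errno codes, so the stat fixup is skipped for them. */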
static inline long __syscall0(long n)
{
	return (__syscall)(n);
}

static inline long __syscall1(long n, long a)
{
	return (__syscall)(n, a);
}

static inline long __syscall2(long n, long a, long b)
{
	long r2 = (__syscall)(n, a, b);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}

static inline long __syscall3(long n, long a, long b, long c)
{
	long r2 = (__syscall)(n, a, b, c);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	return r2;
}

static inline long __syscall4(long n, long a, long b, long c, long d)
{
	long r2 = (__syscall)(n, a, b, c, d);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

#endif

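/* Syscalls with five or more arguments always use the out-of-line
 * __syscall helper, since o32 passes arguments beyond the fourth on the
 * stack. */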
static inline long __syscall5(long n, long a, long b, long c, long d, long e)
{
	long r2 = (__syscall)(n, a, b, c, d, e);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
{
	long r2 = (__syscall)(n, a, b, c, d, e, f);
	if (r2 > -4096UL) return r2;
	if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) __stat_fix(b);
	if (n == SYS_fstatat) __stat_fix(c);
	return r2;
}

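/* The kernel vDSO can provide clock_gettime; these name its exported
 * symbol and symbol version. */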
#define VDSO_USEFUL
#define VDSO_CGT_SYM "__vdso_clock_gettime"
#define VDSO_CGT_VER "LINUX_2.6"