/*	$NetBSD: macros.h,v 1.46 2017/05/22 17:12:11 ragge Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(__lint__)
#define _VAX_MACROS_H_

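/*
 * Out-of-line fallbacks for the inline memory routines below: the MOVC3
 * and MOVC5 string instructions take only a 16-bit length operand, so
 * copies and fills larger than 65535 bytes are handled out of line.
 */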
void	__blkset(void *, int, size_t);
void	__blkcpy(const void *, void *, size_t);

#if !__GNUC_PREREQ__(4, 1)
/* General-purpose inline macros are collected here. */

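/*
 * ffs(3) via the FFS instruction: scan a 32-bit field from bit 0.  FFS
 * sets Z when no set bit is found; in that case -1 is substituted so the
 * final incl yields the C convention: 1..32, or 0 when no bit is set.
 */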
static __inline int __attribute__((__unused__))
vax_ffs(int reg)
{
	register int val;

	__asm volatile ("ffs $0,$32,%1,%0;"
			    "bneq 1f;"
			    "mnegl $1,%0;"
			    "1:;"
			    "incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}
#define ffs vax_ffs
#endif

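/*
 * INSQUE/REMQUE operate on absolute-address doubly linked queues whose
 * elements begin with forward- and backward-link longwords; remque here
 * also clears the backward link of the removed element.
 */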
static __inline void __attribute__((__unused__))
vax_remque(void *p)
{
	__asm volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

static __inline void __attribute__((__unused__))
vax_insque(void *p, void *q)
{
	__asm volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}

#if 0
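/*
 * Operand order reminder: MOVC3 is length,src,dst and MOVC5 is
 * srclen,src,fill,dstlen,dst; both clobber r0-r5, hence the long
 * clobber lists below.
 */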
static __inline void *__attribute__((__unused__))
vax_memcpy(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm volatile ("movc3 %1,%2,%0"
			: "=m" (*(char *)to)
			: "g" (len), "m" (*(const char *)from)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memcpy vax_memcpy

static __inline void *__attribute__((__unused__))
vax_memmove(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm volatile ("movc3 %1,%2,%0"
			: "=m" (*(char *)to)
			: "g" (len), "mo" (*(const char *)from)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memmove vax_memmove

static __inline void *__attribute__((__unused__))
vax_memset(void *block, int c, size_t len)
{
	if (len > 65535) {
		__blkset(block, c, len);
	} else {
		__asm volatile ("movc5 $0,(%%sp),%2,%1,%0"
			: "=m" (*(char *)block)
			: "g" (len), "g" (c)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}
#define memset vax_memset
#endif

#ifdef notdef
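/*
 * The routines below lean on LOCC/MOVC3/CMPC3 and inherit the 16-bit
 * length limit of those instructions: strings are assumed to be
 * shorter than 65535 bytes.
 */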
/* XXX - the return value of memcmp is wrong: CMPC3 leaves a remaining
 * byte count in r0, not the signed byte difference memcmp(3) requires. */
static __inline int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin new routines */
static __inline size_t __attribute__((__unused__))
strlen(const char *cp)
{
	register size_t ret;

	__asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%0"
			: "=r" (ret)
			: "r" (cp)
			: "r0","r1","cc" );
	return	ret;
}

static __inline char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
	__asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "incl %%r2;"
			   "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
	__asm volatile("locc $0,%2,(%1);"
			   "subl3 %%r0,%2,%%r2;"
			   "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1);"
			   "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2), "g"(count)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
	__asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "movc3 %%r2,(%1),(%0);"
			   "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
	__asm volatile("movl %2,%%r2;"
			   "locc $0,%%r2,(%1);"
			   "beql 1f;"
			   "subl3 %%r0,%2,%%r2;"
			   "clrb (%0)[%%r2];"
			   "1:;"
			   "movc3 %%r2,(%1),(%0)"
			:
			: "r" (cp), "r" (c2), "g"(len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline void *__attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
	void *ret;
	__asm volatile("locc %2,%3,(%1);"
			   "bneq 1f;"
			   "clrl %%r1;"
			   "1:;"
			   "movl %%r1,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c), "g"(len)
			: "r0","r1","cc");
	return	ret;
}

static __inline int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
	register int ret;
	__asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r0;"
			   "incl %%r0;"
			   "cmpc3 %%r0,(%1),(%2);"
			   "beql 1f;"
			   "movl $1,%%r2;"
			   "cmpb (%%r1),(%%r3);"
			   "bcc 1f;"
			   "mnegl $1,%%r2;"
			   "1:;"
			   "movl %%r2,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","cc");
	return	ret;
}
#endif

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm volatile("locc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#endif

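/*
 * SCANC scans a byte string, using each byte as an index into the
 * table and ANDing the table entry with the mask; it stops at the
 * first nonzero result and leaves the number of bytes remaining in
 * r0 (0 if the whole string was scanned), which is returned here.
 */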
static __inline int __attribute__((__unused__))
vax_scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm volatile("scanc %1,(%2),(%3),%4;"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}
#define scanc vax_scanc

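/*
 * SKPC skips leading bytes that are equal to the mask character and
 * leaves in r0 the number of bytes remaining from the first mismatch
 * (0 if the whole string matched), which is returned here.
 */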
static __inline int __attribute__((__unused__))
vax_skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm volatile("skpc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#define skpc vax_skpc

/*
 * Set (bbssi) or clear (bbcci) a bit at a memory position; interlocked.
 * Return 0 if the bit was already in the requested state, 1 otherwise.
 */
static __inline int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
			   "bbssi %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}

static __inline int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
			   "bbcci %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}

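/*
 * Usage sketch (illustrative only; 'lk' is a hypothetical lock word):
 * a simple interlocked spin lock can be built on these primitives:
 *
 *	while (bbssi(0, &lk) == 0)
 *		;			(bit was already set: spin)
 *	...critical section...
 *	(void)bbcci(0, &lk);		(release the lock)
 */
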
static inline struct lwp *
cpu_switchto(struct lwp *oldlwp, struct lwp *newlwp, bool returning)
{
	struct lwp *prevlwp;
	__asm volatile(
		"movl %1,%%r0;"		/* old and new lwp go in r0/r1 */
		"movl %2,%%r1;"
		"movpsl -(%%sp);"	/* push PSL; jsb pushes the PC */
		"jsb Swtchto;"		/* switch code returns prev in r0 */
		"movl %%r0,%0"
	    : "=g"(prevlwp)
	    : "g" (oldlwp), "g" (newlwp)
	    : "r0", "r1");
	return prevlwp;
}

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */
/*
 * insqti() inserts an element at the tail of a queue, interlocked.
 * Returns -1 if the interlock failed, 0 if the element is the first
 * in the queue, and 1 if it was inserted after existing entries.
 */
static __inline int __attribute__((__unused__))
insqti(void *entry, void *header) {
	register int ret;

	__asm volatile(
		"	mnegl $1,%0;"
		"	insqti (%1),(%2);"
		"	bcs 1f;"		/* failed insert */
		"	beql 2f;"		/* jump if first entry */
		"	movl $1,%0;"
		"	brb 1f;"
		"2:	clrl %0;"
		"	1:;"
			: "=&g"(ret)
			: "r"(entry), "r"(header)
			: "memory");

	return ret;
}


/*
 * remqhi() removes the element at the head of the queue, interlocked.
 * Returns (void *)-1 if the interlock failed, NULL if the queue was
 * empty, and the address of the removed element otherwise.
 */
static __inline void *__attribute__((__unused__))
remqhi(void *header) {
	register void *ret;

	__asm volatile(
		"	remqhi (%1),%0;"
		"	bcs 1f;"		/* failed interlock */
		"	bvs 2f;"		/* nothing was removed */
		"	brb 3f;"
		"1:	mnegl $1,%0;"
		"	brb 3f;"
		"2:	clrl %0;"
		"	3:;"
			: "=&g"(ret)
			: "r"(header)
			: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
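
/*
 * Usage sketch (illustrative only; 'elem', 'head' and wakeup_consumer()
 * are hypothetical):
 *
 *	int r;
 *	while ((r = insqti(&elem, &head)) == ILCK_FAILED)
 *		;			(retry until the interlock is won)
 *	if (r == Q_EMPTY)
 *		wakeup_consumer();	(queue was empty before the insert)
 */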

#endif	/* !_VAX_MACROS_H_ && !__lint__ */