/*	$OpenBSD: pio.h,v 1.2 1998/09/15 10:50:12 pefo Exp $	*/

/*
 * Copyright (c) 1995-1999 Per Fogelstrom.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed by Per Fogelstrom.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 *	JNPR: cpufunc.h,v 1.5 2007/08/09 11:23:32 katta
 * $FreeBSD: head/sys/mips/include/cpufunc.h 202031 2010-01-10 19:50:24Z imp $
 */

#ifndef _MACHINE_CPUFUNC_H_
#define	_MACHINE_CPUFUNC_H_

#include <sys/types.h>
#include <machine/cpuregs.h>

/*
 * These functions are required by user-land atomic ops.
 */

static __inline void
mips_barrier(void)
{
	__asm __volatile (".set noreorder\n\t"
			  "nop\n\t"
			  "nop\n\t"
			  "nop\n\t"
			  "nop\n\t"
			  "nop\n\t"
			  "nop\n\t"
			  "nop\n\t"
			  "nop\n\t"
			  ".set reorder\n\t"
			  : : : "memory");
}

static __inline void
mips_wbflush(void)
{
	__asm __volatile ("sync" : : : "memory");
	mips_barrier();
#if 0
	__asm __volatile("mtc0 %0, $12\n" /* MIPS_COP_0_STATUS */
	   : : "r" (flag));
#endif
}

static __inline void
mips_read_membar(void)
{
	/* Nil */
}

static __inline void
mips_write_membar(void)
{
	mips_wbflush();
}
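
/*
 * Example (illustrative sketch, not part of the original code): a
 * producer-side handoff that uses mips_write_membar() to make the data
 * store visible before the flag store.  The structure and field names
 * below are hypothetical.
 *
 *	struct mbox { volatile int data; volatile int ready; };
 *
 *	static void
 *	mbox_post(struct mbox *mb, int v)
 *	{
 *		mb->data = v;
 *		mips_write_membar();
 *		mb->ready = 1;
 *	}
 */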

#ifdef _KERNEL

static __inline void
mips_tlbp(void)
{
	__asm __volatile ("tlbp");
	mips_barrier();
#if 0
	register_t ret;
	register_t tmp;

	__asm __volatile("mfc0	%0, $12\n" /* MIPS_COP_0_STATUS */
			 "and	%1, %0, $~1\n" /* MIPS_SR_INT_IE */
			 "mtc0	%1, $12\n" /* MIPS_COP_0_STATUS */
			 : "=r" (ret), "=r" (tmp));
	return (ret);
#endif
}

static __inline void
mips_tlbr(void)
{
	__asm __volatile ("tlbr");
	mips_barrier();
}

static __inline void
mips_tlbwi(void)
{
	__asm __volatile ("tlbwi");
	mips_barrier();
#if 0
	__asm __volatile("mfc %0, $12\n" /* MIPS_COP_0_STATUS */
			 "or  %0, %0, $1\n" /* MIPS_SR_INT_IE */
			 "mtc0 %0, $12\n" /* MIPS_COP_0_STATUS */
			 : "=r" (tmp));
#endif
}

static __inline void
mips_tlbwr(void)
{
	__asm __volatile ("tlbwr");
	mips_barrier();
}
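
/*
 * Example (illustrative sketch): a typical indexed TLB update pairs the
 * coprocessor 0 accessors generated further down in this file with
 * mips_tlbwi().  "idx", "va", "pte0" and "pte1" are assumed to be
 * supplied by the caller.
 *
 *	mips_wr_index(idx);
 *	mips_wr_pagemask(0);
 *	mips_wr_entryhi(va);
 *	mips_wr_entrylo0(pte0);
 *	mips_wr_entrylo1(pte1);
 *	mips_tlbwi();
 */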

#if 0	/* XXX mips64 */

#define	MIPS_RDRW64_COP0(n,r)					\
static __inline uint64_t					\
mips_rd_ ## n (void)						\
{								\
	int v0;							\
	__asm __volatile ("dmfc0 %[v0], $"__XSTRING(r)";"	\
			  : [v0] "=&r"(v0));			\
	mips_barrier();						\
	return (v0);						\
}								\
static __inline void						\
mips_wr_ ## n (uint64_t a0)					\
{								\
	__asm __volatile ("dmtc0 %[a0], $"__XSTRING(r)";"	\
			 __XSTRING(COP0_SYNC)";"		\
			 "nop;"					\
			 "nop;"					\
			 :					\
			 : [a0] "r"(a0));			\
	mips_barrier();						\
} struct __hack

MIPS_RDRW64_COP0(entrylo0, MIPS_COP_0_TLB_LO0);
MIPS_RDRW64_COP0(entrylo1, MIPS_COP_0_TLB_LO1);
MIPS_RDRW64_COP0(entryhi, MIPS_COP_0_TLB_HI);
MIPS_RDRW64_COP0(pagemask, MIPS_COP_0_TLB_PG_MASK);
MIPS_RDRW64_COP0(xcontext, MIPS_COP_0_TLB_XCONTEXT);

#undef	MIPS_RDRW64_COP0
#endif

#define	MIPS_RDRW32_COP0(n,r)					\
static __inline uint32_t					\
mips_rd_ ## n (void)						\
{								\
	int v0;							\
	__asm __volatile ("mfc0 %[v0], $"__XSTRING(r)";"	\
			  : [v0] "=&r"(v0));			\
	mips_barrier();						\
	return (v0);						\
}								\
static __inline void						\
mips_wr_ ## n (uint32_t a0)					\
{								\
	__asm __volatile ("mtc0 %[a0], $"__XSTRING(r)";"	\
			 __XSTRING(COP0_SYNC)";"		\
			 "nop;"					\
			 "nop;"					\
			 :					\
			 : [a0] "r"(a0));			\
	mips_barrier();						\
} struct __hack

#define	MIPS_RDRW32_COP0_SEL(n,r,s)				\
static __inline uint32_t					\
mips_rd_ ## n ## s(void)					\
{								\
	int v0;							\
	__asm __volatile ("mfc0 %[v0], $"__XSTRING(r)", "__XSTRING(s)";"	\
			  : [v0] "=&r"(v0));			\
	mips_barrier();						\
	return (v0);						\
}								\
static __inline void						\
mips_wr_ ## n ## s(uint32_t a0)					\
{								\
	__asm __volatile ("mtc0 %[a0], $"__XSTRING(r)", "__XSTRING(s)";"	\
			 __XSTRING(COP0_SYNC)";"		\
			 "nop;"					\
			 "nop;"					\
			 :					\
			 : [a0] "r"(a0));			\
	mips_barrier();						\
} struct __hack

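/*
 * Each MIPS_RDRW32_COP0()/MIPS_RDRW32_COP0_SEL() use below generates a
 * read/write accessor pair for one coprocessor 0 register.  As a rough
 * illustration (the COP0_SYNC and nop padding is omitted here),
 *
 *	MIPS_RDRW32_COP0(status, MIPS_COP_0_STATUS);
 *
 * produces mips_rd_status() and mips_wr_status(), which expand to little
 * more than an "mfc0"/"mtc0" on the Status register followed by
 * mips_barrier().
 */
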
#ifdef TARGET_OCTEON
static __inline void
mips_sync_icache(void)
{
	__asm __volatile (
		".set push\n"
		".set mips64\n"
		".word 0x041f0000\n"		/* xxx ICACHE */
		"nop\n"
		".set pop\n"
		: : );
}
#endif

MIPS_RDRW32_COP0(compare, MIPS_COP_0_COMPARE);
MIPS_RDRW32_COP0(config, MIPS_COP_0_CONFIG);
MIPS_RDRW32_COP0_SEL(config, MIPS_COP_0_CONFIG, 1);
MIPS_RDRW32_COP0_SEL(config, MIPS_COP_0_CONFIG, 2);
MIPS_RDRW32_COP0_SEL(config, MIPS_COP_0_CONFIG, 3);
MIPS_RDRW32_COP0(count, MIPS_COP_0_COUNT);
MIPS_RDRW32_COP0(index, MIPS_COP_0_TLB_INDEX);
MIPS_RDRW32_COP0(wired, MIPS_COP_0_TLB_WIRED);
MIPS_RDRW32_COP0(cause, MIPS_COP_0_CAUSE);
MIPS_RDRW32_COP0(status, MIPS_COP_0_STATUS);

/* XXX: Some of these registers are specific to MIPS32. */
MIPS_RDRW32_COP0(entrylo0, MIPS_COP_0_TLB_LO0);
MIPS_RDRW32_COP0(entrylo1, MIPS_COP_0_TLB_LO1);
MIPS_RDRW32_COP0(entrylow, MIPS_COP_0_TLB_LOW);
MIPS_RDRW32_COP0(entryhi, MIPS_COP_0_TLB_HI);
MIPS_RDRW32_COP0(pagemask, MIPS_COP_0_TLB_PG_MASK);
MIPS_RDRW32_COP0(prid, MIPS_COP_0_PRID);
MIPS_RDRW32_COP0(watchlo, MIPS_COP_0_WATCH_LO);
MIPS_RDRW32_COP0_SEL(watchlo, MIPS_COP_0_WATCH_LO, 1);
MIPS_RDRW32_COP0_SEL(watchlo, MIPS_COP_0_WATCH_LO, 2);
MIPS_RDRW32_COP0_SEL(watchlo, MIPS_COP_0_WATCH_LO, 3);
MIPS_RDRW32_COP0(watchhi, MIPS_COP_0_WATCH_HI);
MIPS_RDRW32_COP0_SEL(watchhi, MIPS_COP_0_WATCH_HI, 1);
MIPS_RDRW32_COP0_SEL(watchhi, MIPS_COP_0_WATCH_HI, 2);
MIPS_RDRW32_COP0_SEL(watchhi, MIPS_COP_0_WATCH_HI, 3);
#undef	MIPS_RDRW32_COP0
#undef	MIPS_RDRW32_COP0_SEL

static __inline register_t
intr_disable(void)
{
	register_t s;

	s = mips_rd_status();
	mips_wr_status(s & ~MIPS_SR_INT_IE);

	return (s);
}

static __inline register_t
intr_enable(void)
{
	register_t s;

	s = mips_rd_status();
	mips_wr_status(s | MIPS_SR_INT_IE);

	return (s);
}

#define	intr_restore(s)	mips_wr_status((s))
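
/*
 * Example (illustrative sketch): the usual pattern for a short section of
 * code that must run with interrupts disabled on the local CPU.
 *
 *	register_t s;
 *
 *	s = intr_disable();
 *	... touch the state shared with interrupt handlers ...
 *	intr_restore(s);
 */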

static __inline void
breakpoint(void)
{
	__asm __volatile ("break");
}

#endif /* _KERNEL */

#define	readb(va)	(*(volatile uint8_t *) (va))
#define	readw(va)	(*(volatile uint16_t *) (va))
#define	readl(va)	(*(volatile uint32_t *) (va))

#define	writeb(va, d)	(*(volatile uint8_t *) (va) = (d))
#define	writew(va, d)	(*(volatile uint16_t *) (va) = (d))
#define	writel(va, d)	(*(volatile uint32_t *) (va) = (d))

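/*
 * Example (illustrative sketch): readb()/readw()/readl() and their write
 * counterparts expect a CPU virtual address that already maps the device
 * register, e.g. an uncached KSEG1 address on a 32-bit kernel.  The
 * physical address and register offset below are hypothetical.
 *
 *	uint32_t v;
 *
 *	v = readl(MIPS_PHYS_TO_KSEG1(0x1f000000) + 0x10);
 *	writel(MIPS_PHYS_TO_KSEG1(0x1f000000) + 0x10, v | 1);
 */
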
/*
 * I/O macros.
 */

#define	outb(a,v)	(*(volatile unsigned char*)(a) = (v))
#define	out8(a,v)	(*(volatile unsigned char*)(a) = (v))
#define	outw(a,v)	(*(volatile unsigned short*)(a) = (v))
#define	out16(a,v)	outw(a,v)
#define	outl(a,v)	(*(volatile unsigned int*)(a) = (v))
#define	out32(a,v)	outl(a,v)
#define	inb(a)		(*(volatile unsigned char*)(a))
#define	in8(a)		(*(volatile unsigned char*)(a))
#define	inw(a)		(*(volatile unsigned short*)(a))
#define	in16(a)		inw(a)
#define	inl(a)		(*(volatile unsigned int*)(a))
#define	in32(a)		inl(a)

#define	out8rb(a,v)	(*(volatile unsigned char*)(a) = (v))
#define	out16rb(a,v)	(__out16rb((volatile uint16_t *)(a), v))
#define	out32rb(a,v)	(__out32rb((volatile uint32_t *)(a), v))
#define	in8rb(a)	(*(volatile unsigned char*)(a))
#define	in16rb(a)	(__in16rb((volatile uint16_t *)(a)))
#define	in32rb(a)	(__in32rb((volatile uint32_t *)(a)))

#define	_swap_(x)	(((x) >> 24) | ((x) << 24) | \
	    (((x) >> 8) & 0xff00) | (((x) & 0xff00) << 8))

static __inline void __out32rb(volatile uint32_t *, uint32_t);
static __inline void __out16rb(volatile uint16_t *, uint16_t);
static __inline uint32_t __in32rb(volatile uint32_t *);
static __inline uint16_t __in16rb(volatile uint16_t *);

static __inline void
__out32rb(volatile uint32_t *a, uint32_t v)
{
	uint32_t _v_ = v;

	_v_ = _swap_(_v_);
	out32(a, _v_);
}

static __inline void
__out16rb(volatile uint16_t *a, uint16_t v)
{
	uint16_t _v_;

	_v_ = ((v >> 8) & 0xff) | (v << 8);
	out16(a, _v_);
}

static __inline uint32_t
__in32rb(volatile uint32_t *a)
{
	uint32_t _v_;

	_v_ = in32(a);
	_v_ = _swap_(_v_);
	return (_v_);
}

static __inline uint16_t
__in16rb(volatile uint16_t *a)
{
	uint16_t _v_;

	_v_ = in16(a);
	_v_ = ((_v_ >> 8) & 0xff) | (_v_ << 8);
	return (_v_);
}
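
/*
 * Example (illustrative sketch): the "rb" (reverse-byte) accessors are for
 * device registers whose byte order is the opposite of the CPU's, e.g.
 * reading a 32-bit big-endian register from a little-endian kernel.  The
 * register address below is hypothetical.
 *
 *	uint32_t id;
 *
 *	id = in32rb(MIPS_PHYS_TO_KSEG1(0x1f000000));
 */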

void insb(uint8_t *, uint8_t *, int);
void insw(uint16_t *, uint16_t *, int);
void insl(uint32_t *, uint32_t *, int);
void outsb(uint8_t *, const uint8_t *, int);
void outsw(uint16_t *, const uint16_t *, int);
void outsl(uint32_t *, const uint32_t *, int);
u_int loadandclear(volatile u_int *addr);

#endif /* !_MACHINE_CPUFUNC_H_ */