/*-
 * Copyright (c) 2001 Jake Burkholder.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/sparc64/include/cpufunc.h 220939 2011-04-22 09:31:40Z marius $
 */

#ifndef	_MACHINE_CPUFUNC_H_
#define	_MACHINE_CPUFUNC_H_

#include <machine/asi.h>
#include <machine/pstate.h>

struct thread;

/*
 * Membar operand macros for use in other macros when # is a special
 * character.  Keep these in sync with what the hardware expects.
 */
#define	C_Lookaside	(0)
#define	C_MemIssue	(1)
#define	C_Sync		(2)
#define	M_LoadLoad	(0)
#define	M_StoreLoad	(1)
#define	M_LoadStore	(2)
#define	M_StoreStore	(3)

#define	CMASK_SHIFT	(4)
#define	MMASK_SHIFT	(0)

#define	CMASK_GEN(bit)	((1 << (bit)) << CMASK_SHIFT)
#define	MMASK_GEN(bit)	((1 << (bit)) << MMASK_SHIFT)

#define	Lookaside	CMASK_GEN(C_Lookaside)
#define	MemIssue	CMASK_GEN(C_MemIssue)
#define	Sync		CMASK_GEN(C_Sync)
#define	LoadLoad	MMASK_GEN(M_LoadLoad)
#define	StoreLoad	MMASK_GEN(M_StoreLoad)
#define	LoadStore	MMASK_GEN(M_LoadStore)
#define	StoreStore	MMASK_GEN(M_StoreStore)

#define	casa(rs1, rs2, rd, asi) ({					\
	u_int __rd = (uint32_t)(rd);					\
	__asm __volatile("casa [%2] %3, %4, %0"				\
	    : "+r" (__rd), "=m" (*rs1)					\
	    : "r" (rs1), "n" (asi), "r" (rs2), "m" (*rs1));		\
	__rd;								\
})

#define	casxa(rs1, rs2, rd, asi) ({					\
	u_long __rd = (uint64_t)(rd);					\
	__asm __volatile("casxa [%2] %3, %4, %0"			\
	    : "+r" (__rd), "=m" (*rs1)					\
	    : "r" (rs1), "n" (asi), "r" (rs2), "m" (*rs1));		\
	__rd;								\
})

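/*
 * Illustrative sketch, not part of the original header: one way casa()
 * can be used to build a compare-and-swap loop.  The function name is
 * hypothetical and ASI_N (from <machine/asi.h>) is assumed to be the
 * right ASI for ordinary kernel virtual addresses.
 */
#if 0
static __inline uint32_t
example_atomic_inc_32(volatile uint32_t *p)
{
	uint32_t exp, old;

	old = *p;
	do {
		exp = old;
		/* casa() hands back the previous memory contents. */
		old = casa(p, exp, exp + 1, ASI_N);
	} while (old != exp);
	return (old + 1);
}
#endif
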
#define	flush(va) do {							\
	__asm __volatile("flush %0" : : "r" (va));			\
} while (0)

#define	flushw() do {							\
	__asm __volatile("flushw" : :);					\
} while (0)

#define	mov(val, reg) do {						\
	__asm __volatile("mov %0, %" __XSTRING(reg) : : "r" (val));	\
} while (0)

/* Generate ld*a/st*a functions for non-constant ASIs. */
#define	LDNC_GEN(tp, o)							\
	static __inline tp						\
	o ## _nc(caddr_t va, int asi)					\
	{								\
		tp r;							\
		__asm __volatile("wr %2, 0, %%asi;" #o " [%1] %%asi, %0"\
		    : "=r" (r) : "r" (va), "r" (asi));			\
		return (r);						\
	}

LDNC_GEN(u_char, lduba);
LDNC_GEN(u_short, lduha);
LDNC_GEN(u_int, lduwa);
LDNC_GEN(u_long, ldxa);

#define	LD_GENERIC(va, asi, op, type) ({				\
	type __r;							\
	__asm __volatile(#op " [%1] %2, %0"				\
	    : "=r" (__r) : "r" (va), "n" (asi));			\
	__r;								\
})

#define	lduba(va, asi)	LD_GENERIC(va, asi, lduba, u_char)
#define	lduha(va, asi)	LD_GENERIC(va, asi, lduha, u_short)
#define	lduwa(va, asi)	LD_GENERIC(va, asi, lduwa, u_int)
#define	ldxa(va, asi)	LD_GENERIC(va, asi, ldxa, u_long)

#define	STNC_GEN(tp, o)							\
	static __inline void						\
	o ## _nc(caddr_t va, int asi, tp val)				\
	{								\
		__asm __volatile("wr %2, 0, %%asi;" #o " %0, [%1] %%asi"\
		    : : "r" (val), "r" (va), "r" (asi));		\
	}

STNC_GEN(u_char, stba);
STNC_GEN(u_short, stha);
STNC_GEN(u_int, stwa);
STNC_GEN(u_long, stxa);

#define	ST_GENERIC(va, asi, val, op)					\
	__asm __volatile(#op " %0, [%1] %2"				\
	    : : "r" (val), "r" (va), "n" (asi));			\

#define	stba(va, asi, val)	ST_GENERIC(va, asi, val, stba)
#define	stha(va, asi, val)	ST_GENERIC(va, asi, val, stha)
#define	stwa(va, asi, val)	ST_GENERIC(va, asi, val, stwa)
#define	stxa(va, asi, val)	ST_GENERIC(va, asi, val, stxa)

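/*
 * Illustrative sketch, not part of the original header: the plain ld*a/st*a
 * macros need the ASI as a compile-time constant ("n" constraint), while the
 * *_nc variants write %asi first and therefore accept a run-time value.  The
 * function name is hypothetical; ASI_N is assumed to be usable here.
 */
#if 0
static __inline u_long
example_alternate_space_access(caddr_t va, int runtime_asi)
{
	u_long x;

	x = ldxa(va, ASI_N);		/* ASI fixed at compile time */
	x += ldxa_nc(va, runtime_asi);	/* ASI chosen at run time */
	stxa(va, ASI_N, x);
	return (x);
}
#endif
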
/*
 * Attempt to read from addr into *val.  If a Data Access Error trap
 * occurs, these functions return -1 and the contents of *val are
 * undefined.  A return of 0 means no trap occurred and *val holds the
 * value read.
 */
int fasword8(u_long asi, void *addr, uint8_t *val);
int fasword16(u_long asi, void *addr, uint16_t *val);
int fasword32(u_long asi, void *addr, uint32_t *val);

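/*
 * Illustrative sketch, not part of the original header: a fault-tolerant
 * read using fasword8().  The function name is hypothetical and ASI_AIUP
 * ("as if user, primary", from <machine/asi.h>) is assumed as the ASI for
 * touching a user address from kernel context.
 */
#if 0
static __inline int
example_probe_user_byte(void *uaddr, uint8_t *val)
{

	if (fasword8(ASI_AIUP, uaddr, val) == -1)
		return (-1);	/* access trapped; *val is undefined */
	return (0);		/* *val holds the byte that was read */
}
#endif
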
#define	membar(mask) do {						\
	__asm __volatile("membar %0" : : "n" (mask) : "memory");	\
} while (0)

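/*
 * Illustrative sketch, not part of the original header: the operand macros
 * above exist because '#' cannot be written inside these macros, so
 * membar(StoreLoad) passes the numeric encoding of "membar #StoreLoad".
 * The function name is hypothetical.
 */
#if 0
static __inline void
example_publish(volatile u_long *desc, u_long v)
{

	*desc = v;
	/* Order the store above before any later loads on this CPU. */
	membar(StoreLoad);
}
#endif
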
#define	rd(name) ({							\
	uint64_t __sr;							\
	__asm __volatile("rd %%" #name ", %0" : "=r" (__sr) :);		\
	__sr;								\
})

#define	wr(name, val, xorval) do {					\
	__asm __volatile("wr %0, %1, %%" #name				\
	    : : "r" (val), "rI" (xorval));				\
} while (0)

#define	rdpr(name) ({							\
	uint64_t __pr;							\
	__asm __volatile("rdpr %%" #name", %0" : "=r" (__pr) :);	\
	__pr;								\
})

#define	wrpr(name, val, xorval) do {					\
	__asm __volatile("wrpr %0, %1, %%" #name			\
	    : : "r" (val), "rI" (xorval));				\
} while (0)

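/*
 * Illustrative sketch, not part of the original header: rd()/wr() access
 * ancillary state registers and rdpr()/wrpr() privileged registers; the
 * register name is pasted into the instruction, so it must be a literal
 * token such as tick or pil.  The function name is hypothetical.
 */
#if 0
static __inline uint64_t
example_timed_section(void)
{
	uint64_t start;
	u_long opil;

	start = rd(tick);	/* cycle counter */
	opil = rdpr(pil);	/* save the current %pil */
	wrpr(pil, 14, 0);	/* writes val ^ xorval, here 14 ^ 0 */
	/* ... work that must not see interrupts at level 14 or below ... */
	wrpr(pil, opil, 0);	/* restore the saved %pil */
	return (rd(tick) - start);
}
#endif
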
/*
 * Trick GAS/GCC into compiling access to TICK/(S)TICK_COMPARE independently
 * of the selected instruction set.
 */
#define	rdtickcmpr()			rd(asr23)
#define	rdstick()			rd(asr24)
#define	rdstickcmpr()			rd(asr25)
#define	wrtickcmpr(val, xorval)		wr(asr23, (val), (xorval))
#define	wrstick(val, xorval)		wr(asr24, (val), (xorval))
#define	wrstickcmpr(val, xorval)	wr(asr25, (val), (xorval))

/*
 * Macro intended to be used instead of wr(asr23, val, xorval) for writing to
 * the TICK_COMPARE register in order to avoid a bug in BlackBird CPUs that
 * can cause these writes to fail under certain conditions which in turn
 * causes the hardclock to stop.  The workaround is to read the TICK_COMPARE
 * register back immediately after writing to it with these two instructions
 * aligned to a quadword boundary in order to ensure that I$ misses won't
 * split them up.
 */
#define	wrtickcmpr_bbwar(val, xorval) ({				\
	__asm __volatile(						\
	"	ba,pt	%%xcc, 1f ;		"			\
	"	 nop	 ;			"			\
	"	.align	128 ;			"			\
	"1:	wr	%0, %1, %%asr23 ;	"			\
	"	rd	%%asr23, %%g0 ;		"			\
	: : "r" (val), "rI" (xorval));					\
})

static __inline void
breakpoint(void)
{

	__asm __volatile("ta %%xcc, 1" : :);
}

static __inline register_t
intr_disable(void)
{
	register_t s;

	s = rdpr(pstate);
	wrpr(pstate, s & ~PSTATE_IE, 0);
	return (s);
}
#define	intr_restore(s)	wrpr(pstate, (s), 0)

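/*
 * Illustrative sketch, not part of the original header: the usual pairing of
 * intr_disable() and intr_restore() around a short critical section.  The
 * saved PSTATE value, not a constant, must be handed back to intr_restore().
 * The function name is hypothetical.
 */
#if 0
static __inline void
example_critical(volatile u_long *counter)
{
	register_t s;

	s = intr_disable();	/* clear PSTATE.IE, remember the old value */
	(*counter)++;		/* no interrupt can intervene here */
	intr_restore(s);	/* put PSTATE back exactly as it was */
}
#endif
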
/*
 * In some places, it is required that the store be directly followed by a
 * membar #Sync.  Don't trust the compiler not to insert instructions in
 * between.  We also need to disable interrupts completely.
 */
#define	stxa_sync(va, asi, val) do {					\
	register_t s;							\
	s = intr_disable();						\
	__asm __volatile("stxa %0, [%1] %2; membar #Sync"		\
	    : : "r" (val), "r" (va), "n" (asi));			\
	intr_restore(s);						\
} while (0)

236
237void ascopy(u_long asi, vm_offset_t src, vm_offset_t dst, size_t len);
238void ascopyfrom(u_long sasi, vm_offset_t src, caddr_t dst, size_t len);
239void ascopyto(caddr_t src, u_long dasi, vm_offset_t dst, size_t len);
240void aszero(u_long asi, vm_offset_t dst, size_t len);
241
242/*
243 * Ultrasparc II doesn't implement popc in hardware.
244 */
#if 0
#define	HAVE_INLINE_FFS
/*
 * See page 202 of the SPARC v9 Architecture Manual.
 */
static __inline int
ffs(int mask)
{
	int result;
	int neg;
	int tmp;

	__asm __volatile(
	"	neg	%3, %1 ;	"
	"	xnor	%3, %1, %2 ;	"
	"	popc	%2, %0 ;	"
	"	movrz	%3, %%g0, %0 ;	"
	: "=r" (result), "=r" (neg), "=r" (tmp) : "r" (mask));
	return (result);
}
#endif

#undef LDNC_GEN
#undef STNC_GEN

#endif /* !_MACHINE_CPUFUNC_H_ */