/*-
 * Copyright (c) 2001 Jake Burkholder.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/sparc64/include/cpufunc.h 80709 2001-07-31 06:05:05Z jake $
 */

#ifndef	_MACHINE_CPUFUNC_H_
#define	_MACHINE_CPUFUNC_H_

#include <machine/asi.h>
#include <machine/pstate.h>

/*
 * membar operand macros, for use in other macros where `#' is a special
 * character and the assembler's `#LoadLoad'-style operand names cannot
 * be used.  Keep these encodings in sync with what the hardware expects.
 */
#define	C_Lookaside	(0)
#define	C_MemIssue	(1)
#define	C_Sync		(2)
#define	M_LoadLoad	(0)
#define	M_StoreLoad	(1)
#define	M_LoadStore	(2)
#define	M_StoreStore	(3)

#define	CMASK_SHIFT	(4)
#define	MMASK_SHIFT	(0)

#define	CMASK_GEN(bit)	((1 << (bit)) << CMASK_SHIFT)
#define	MMASK_GEN(bit)	((1 << (bit)) << MMASK_SHIFT)

#define	Lookaside	CMASK_GEN(C_Lookaside)
#define	MemIssue	CMASK_GEN(C_MemIssue)
#define	Sync		CMASK_GEN(C_Sync)
#define	LoadLoad	MMASK_GEN(M_LoadLoad)
#define	StoreLoad	MMASK_GEN(M_StoreLoad)
#define	LoadStore	MMASK_GEN(M_LoadStore)
#define	StoreStore	MMASK_GEN(M_StoreStore)

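/*
 * For reference, the definitions above expand to the membar encoding's
 * mmask bits 3:0 and cmask bits 6:4: LoadLoad is 0x01, StoreLoad 0x02,
 * LoadStore 0x04, StoreStore 0x08, Lookaside 0x10, MemIssue 0x20 and
 * Sync 0x40, so e.g. membar(LoadLoad | StoreStore) emits a membar with
 * an immediate operand of 0x09.
 */
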
/*
 * Compare-and-swap: compare the value at [rs1] in the given address
 * space with rs2 and, if they are equal, atomically store rd there.
 * The previous memory value is returned either way.  casa operates on
 * 32-bit words, casxa on 64-bit doublewords; asi must be a constant.
 */
#define	casa(rs1, rs2, rd, asi) ({					\
	u_int __rd = (u_int32_t)(rd);					\
	__asm __volatile("casa [%1] %2, %3, %0"				\
	    : "+r" (__rd) : "r" (rs1), "n" (asi), "r" (rs2));		\
	__rd;								\
})

#define	casxa(rs1, rs2, rd, asi) ({					\
	u_long __rd = (u_int64_t)(rd);					\
	__asm __volatile("casxa [%1] %2, %3, %0"			\
	    : "+r" (__rd) : "r" (rs1), "n" (asi), "r" (rs2));		\
	__rd;								\
})

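/*
 * Illustrative sketch only, not part of the interface: a compare-and-swap
 * retry loop built on casxa(), assuming the ASI_N (nucleus address space)
 * constant from <machine/asi.h>.  casxa() returns the previous memory
 * value, so the loop retries until the swap went through undisturbed.
 */
#if 0
static __inline u_long
example_fetchadd_long(volatile u_long *p, u_long v)
{
	u_long e, r;

	do {
		e = *p;
		r = casxa(p, e, e + v, ASI_N);
	} while (r != e);
	return (e);
}
#endif
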
#define	flush(va) do {							\
	__asm __volatile("flush %0" : : "r" (va));			\
} while (0)

#define	ldxa(va, asi) ({						\
	u_long __r;							\
	__asm __volatile("ldxa [%1] %2, %0"				\
	    : "=r" (__r) : "r" (va), "n" (asi));			\
	__r;								\
})

#define	stxa(va, asi, val) do {						\
	__asm __volatile("stxa %0, [%1] %2"				\
	    : : "r" (val), "r" (va), "n" (asi));			\
} while (0)

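/*
 * Illustrative sketch only: ldxa()/stxa() are typically used to access
 * internal registers through a constant alternate space identifier.
 * EXAMPLE_ASI and EXAMPLE_REG are hypothetical placeholders, not
 * definitions from <machine/asi.h>.
 */
#if 0
#define	EXAMPLE_ASI	0x50		/* hypothetical ASI number */
#define	EXAMPLE_REG	0x0		/* hypothetical register offset */

static __inline u_long
example_set_internal_reg_bits(u_long set)
{
	u_long val;

	val = ldxa(EXAMPLE_REG, EXAMPLE_ASI);
	stxa(EXAMPLE_REG, EXAMPLE_ASI, val | set);
	return (val);
}
#endif
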
#define	membar(mask) do {						\
	__asm __volatile("membar %0" : : "n" (mask));			\
} while (0)

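/*
 * Illustrative sketch only: a publish/consume pairing using the operand
 * masks defined above.  The writer orders its data store before the flag
 * store with membar(StoreStore); the reader orders the flag load before
 * the data load with membar(LoadLoad).
 */
#if 0
static __inline void
example_publish(volatile u_long *datap, volatile u_int *flagp, u_long v)
{
	*datap = v;
	membar(StoreStore);
	*flagp = 1;
}

static __inline u_long
example_consume(volatile u_long *datap, volatile u_int *flagp)
{
	while (*flagp == 0)
		;
	membar(LoadLoad);
	return (*datap);
}
#endif
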
/*
 * Read an ancillary state register; the register name is pasted into the
 * instruction, so it is given without the leading `%'.
 */
#define	rd(name) ({							\
	u_int64_t __sr;							\
	__asm __volatile("rd %%" #name ", %0" : "=r" (__sr) :);		\
	__sr;								\
})

/*
 * Write an ancillary state register.  The wr instruction stores
 * val ^ xor, which makes it easy to toggle individual bits.
 */
#define	wr(name, val, xor) do {						\
	__asm __volatile("wr %0, %1, %%" #name				\
	    : : "r" (val), "rI" (xor));					\
} while (0)

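/*
 * Illustrative sketch only: reading the %tick cycle counter ASR through
 * the rd() wrapper, e.g. to time a short code sequence.
 */
#if 0
static __inline u_int64_t
example_cycle_delta(void)
{
	u_int64_t start, end;

	start = rd(tick);
	/* ... code being timed ... */
	end = rd(tick);
	return (end - start);
}
#endif
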
#define	rdpr(name) ({							\
	u_int64_t __pr;							\
	__asm __volatile("rdpr %%" #name ", %0" : "=r" (__pr) :);	\
	__pr;								\
})

/*
 * Write a privileged register.  As with wr(), the instruction stores
 * val ^ xor, so passing a mask as xor toggles those bits.
 */
#define	wrpr(name, val, xor) do {					\
	__asm __volatile("wrpr %0, %1, %%" #name			\
	    : : "r" (val), "rI" (xor));					\
} while (0)

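/*
 * Illustrative sketch only: raising and restoring the processor interrupt
 * level through the privileged %pil register, the approach the XXX note
 * below suggests for the critical section primitives.
 */
#if 0
static __inline u_int64_t
example_set_pil(u_int64_t new)
{
	u_int64_t old;

	old = rdpr(pil);
	wrpr(pil, new, 0);
	return (old);
}
#endif
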
static __inline void
breakpoint(void)
{
	__asm __volatile("ta 1");
}

/*
 * XXX use %pil for these.
 */
static __inline critical_t
critical_enter(void)
{
	critical_t ie;

	ie = rdpr(pstate);
	if (ie & PSTATE_IE)
		wrpr(pstate, ie, PSTATE_IE);	/* xor clears PSTATE_IE. */
	return (ie);
}

static __inline void
critical_exit(critical_t ie)
{

	if (ie & PSTATE_IE)
		wrpr(pstate, ie, 0);		/* restore saved state. */
}

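/*
 * Illustrative sketch only: the intended pairing of the primitives above,
 * with the saved interrupt-enable state threaded from enter to exit.
 */
#if 0
static __inline void
example_critical_section(void)
{
	critical_t ie;

	ie = critical_enter();
	/* ... work that must not be interrupted ... */
	critical_exit(ie);
}
#endif
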
#if 0
#define	HAVE_INLINE_FFS
/*
 * See page 202 of the SPARC v9 Architecture Manual.
 */
static __inline int
ffs(int mask)
{
	int result;
	int neg;
	int tmp;

	__asm __volatile(
	"	neg	%3, %1 ;	"	/* neg = -mask */
	"	xnor	%3, %1, %2 ;	"	/* ones at and below lowest set bit */
	"	popc	%2, %0 ;	"	/* count them: bit index + 1 */
	"	movrz	%3, %%g0, %0 ;	"	/* but 0 if mask was 0 */
	: "=r" (result), "=r" (neg), "=r" (tmp) : "r" (mask));
	return (result);
}
#endif

#endif /* !_MACHINE_CPUFUNC_H_ */