/*-
 * Copyright (c) 2004-2005 David Schultz <das@FreeBSD.ORG>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef	_FENV_H_
#define	_FENV_H_

#include <sys/_types.h>

/*
 * These routines are static inline by default; a file that provides
 * external definitions may pre-define __fenv_static to empty.
 */
#ifndef	__fenv_static
#define	__fenv_static	static
#endif

/* Both the environment and the status flags are 32-bit FPSCR images. */
typedef	__uint32_t	fenv_t;
typedef	__uint32_t	fexcept_t;

/* Exception flags (bit positions within the FPSCR image) */
#define	FE_INEXACT	0x02000000
#define	FE_DIVBYZERO	0x04000000
#define	FE_UNDERFLOW	0x08000000
#define	FE_OVERFLOW	0x10000000
#define	FE_INVALID	0x20000000	/* all types of invalid FP ops */

/*
 * The PowerPC architecture has extra invalid flags that indicate the
 * specific type of invalid operation occurred.  These flags may be
 * tested, set, and cleared---but not masked---separately.  All of
 * these bits are cleared when FE_INVALID is cleared, but only
 * FE_VXSOFT is set when FE_INVALID is explicitly set in software.
 */
#define	FE_VXCVI	0x00000100	/* invalid integer convert */
#define	FE_VXSQRT	0x00000200	/* square root of a negative */
#define	FE_VXSOFT	0x00000400	/* software-requested exception */
#define	FE_VXVC		0x00080000	/* ordered comparison involving NaN */
#define	FE_VXIMZ	0x00100000	/* inf * 0 */
#define	FE_VXZDZ	0x00200000	/* 0 / 0 */
#define	FE_VXIDI	0x00400000	/* inf / inf */
#define	FE_VXISI	0x00800000	/* inf - inf */
#define	FE_VXSNAN	0x01000000	/* operation on a signalling NaN */
/* Every invalid-operation bit, including the FE_INVALID summary bit. */
#define	FE_ALL_INVALID	(FE_VXCVI | FE_VXSQRT | FE_VXSOFT | FE_VXVC | \
			 FE_VXIMZ | FE_VXZDZ | FE_VXIDI | FE_VXISI | \
			 FE_VXSNAN | FE_INVALID)
#define	FE_ALL_EXCEPT	(FE_DIVBYZERO | FE_INEXACT | \
			 FE_ALL_INVALID | FE_OVERFLOW | FE_UNDERFLOW)

/* Rounding modes (low two bits of the FPSCR image) */
#define	FE_TONEAREST	0x0000
#define	FE_TOWARDZERO	0x0001
#define	FE_UPWARD	0x0002
#define	FE_DOWNWARD	0x0003
#define	_ROUND_MASK	(FE_TONEAREST | FE_DOWNWARD | \
			 FE_UPWARD | FE_TOWARDZERO)

__BEGIN_DECLS

/* Default floating-point environment */
extern const fenv_t	__fe_dfl_env;
#define	FE_DFL_ENV	(&__fe_dfl_env)

/* We need to be able to map status flag positions to mask flag positions */
#define	_FPUSW_SHIFT	22
#define	_ENABLE_MASK	((FE_DIVBYZERO | FE_INEXACT | FE_INVALID | \
			 FE_OVERFLOW | FE_UNDERFLOW) >> _FPUSW_SHIFT)

/*
 * FPSCR accessors: __mffs reads the FPSCR into *__env (through an FP
 * register, hence the double), and __mtfsf writes all eight 4-bit
 * fields (mask 255) of __env back.  Both are no-ops for soft-float.
 */
#ifndef _SOFT_FLOAT
#define	__mffs(__env)	__asm __volatile("mffs %0" : "=f" (*(__env)))
#define	__mtfsf(__env)	__asm __volatile("mtfsf 255,%0" : : "f" (__env))
#else
#define	__mffs(__env)
#define	__mtfsf(__env)
#endif

/*
 * mffs/mtfsf move the FPSCR through a floating-point register, so we
 * overlay a double with a pair of 32-bit words to get at the FPSCR
 * image.  NOTE(review): the __junk-first layout assumes the FPSCR
 * image lands in the second word (big-endian powerpc) — confirm for
 * any little-endian port.
 */
union __fpscr {
	double __d;
	struct {
		__uint32_t __junk;	/* upper word, unused */
		fenv_t __reg;		/* FPSCR image */
	} __bits;
};

105__fenv_static inline int
106feclearexcept(int __excepts)
107{
108	union __fpscr __r;
109
110	if (__excepts & FE_INVALID)
111		__excepts |= FE_ALL_INVALID;
112	__mffs(&__r.__d);
113	__r.__bits.__reg &= ~__excepts;
114	__mtfsf(__r.__d);
115	return (0);
116}
117
118__fenv_static inline int
119fegetexceptflag(fexcept_t *__flagp, int __excepts)
120{
121	union __fpscr __r;
122
123	__mffs(&__r.__d);
124	*__flagp = __r.__bits.__reg & __excepts;
125	return (0);
126}
127
128__fenv_static inline int
129fesetexceptflag(const fexcept_t *__flagp, int __excepts)
130{
131	union __fpscr __r;
132
133	if (__excepts & FE_INVALID)
134		__excepts |= FE_ALL_EXCEPT;
135	__mffs(&__r.__d);
136	__r.__bits.__reg &= ~__excepts;
137	__r.__bits.__reg |= *__flagp & __excepts;
138	__mtfsf(__r.__d);
139	return (0);
140}
141
142__fenv_static inline int
143feraiseexcept(int __excepts)
144{
145	union __fpscr __r;
146
147	if (__excepts & FE_INVALID)
148		__excepts |= FE_VXSOFT;
149	__mffs(&__r.__d);
150	__r.__bits.__reg |= __excepts;
151	__mtfsf(__r.__d);
152	return (0);
153}
154
155__fenv_static inline int
156fetestexcept(int __excepts)
157{
158	union __fpscr __r;
159
160	__mffs(&__r.__d);
161	return (__r.__bits.__reg & __excepts);
162}
163
164__fenv_static inline int
165fegetround(void)
166{
167	union __fpscr __r;
168
169	__mffs(&__r.__d);
170	return (__r.__bits.__reg & _ROUND_MASK);
171}
172
173__fenv_static inline int
174fesetround(int __round)
175{
176	union __fpscr __r;
177
178	if (__round & ~_ROUND_MASK)
179		return (-1);
180	__mffs(&__r.__d);
181	__r.__bits.__reg &= ~_ROUND_MASK;
182	__r.__bits.__reg |= __round;
183	__mtfsf(__r.__d);
184	return (0);
185}
186
187__fenv_static inline int
188fegetenv(fenv_t *__envp)
189{
190	union __fpscr __r;
191
192	__mffs(&__r.__d);
193	*__envp = __r.__bits.__reg;
194	return (0);
195}
196
197__fenv_static inline int
198feholdexcept(fenv_t *__envp)
199{
200	union __fpscr __r;
201
202	__mffs(&__r.__d);
203	*__envp = __r.__d;
204	__r.__bits.__reg &= ~(FE_ALL_EXCEPT | _ENABLE_MASK);
205	__mtfsf(__r.__d);
206	return (0);
207}
208
209__fenv_static inline int
210fesetenv(const fenv_t *__envp)
211{
212	union __fpscr __r;
213
214	__r.__bits.__reg = *__envp;
215	__mtfsf(__r.__d);
216	return (0);
217}
218
219__fenv_static inline int
220feupdateenv(const fenv_t *__envp)
221{
222	union __fpscr __r;
223
224	__mffs(&__r.__d);
225	__r.__bits.__reg &= FE_ALL_EXCEPT;
226	__r.__bits.__reg |= *__envp;
227	__mtfsf(__r.__d);
228	return (0);
229}
230
#if __BSD_VISIBLE

/* We currently provide no external definitions of the functions below. */

235static inline int
236feenableexcept(int __mask)
237{
238	union __fpscr __r;
239	fenv_t __oldmask;
240
241	__mffs(&__r.__d);
242	__oldmask = __r.__bits.__reg;
243	__r.__bits.__reg |= (__mask & FE_ALL_EXCEPT) >> _FPUSW_SHIFT;
244	__mtfsf(__r.__d);
245	return ((__oldmask & _ENABLE_MASK) << _FPUSW_SHIFT);
246}
247
248static inline int
249fedisableexcept(int __mask)
250{
251	union __fpscr __r;
252	fenv_t __oldmask;
253
254	__mffs(&__r.__d);
255	__oldmask = __r.__bits.__reg;
256	__r.__bits.__reg &= ~((__mask & FE_ALL_EXCEPT) >> _FPUSW_SHIFT);
257	__mtfsf(__r.__d);
258	return ((__oldmask & _ENABLE_MASK) << _FPUSW_SHIFT);
259}
260
261static inline int
262fegetexcept(void)
263{
264	union __fpscr __r;
265
266	__mffs(&__r.__d);
267	return ((__r.__bits.__reg & _ENABLE_MASK) << _FPUSW_SHIFT);
268}
269
#endif /* __BSD_VISIBLE */

__END_DECLS

#endif	/* !_FENV_H_ */