/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

#if defined(__arm__)
#include <armintr.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
static __inline__
__int64 __emul(int, int);
static __inline__
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
static __inline__
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void *__slwpcb(void);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
static __inline__
unsigned char _interlockedbittestandset(long volatile *, long);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

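/*
 * Usage sketch (illustrative only, not part of the intrinsic surface): the
 * BitScan intrinsics declared above store the zero-based index of the lowest
 * (or highest) set bit and return nonzero only when some bit was set; on a
 * zero mask the index is left undefined.
 *
 *   static int lowest_set_bit(unsigned long mask) {
 *     unsigned long index;
 *     return _BitScanForward(&index, mask) ? (int)index : -1;
 *   }
 */
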
/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
static __inline__
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
static __inline__
__int64 _mul128(__int64, __int64, __int64*);
static __inline__
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64*);

#endif /* __x86_64__ */

#if defined(__x86_64__) || defined(__arm__)

static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif

/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
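/*
 * Usage sketch (illustrative only): _interlockedbittestandset returns the
 * prior state of the bit, so a zero result means the caller is the one who
 * set it and has exclusively claimed the slot.
 *
 *   static int try_claim_slot(long volatile *bitmap, long slot) {
 *     return _interlockedbittestandset(bitmap, slot) == 0;
 *   }
 */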
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
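/*
 * Usage sketch (illustrative only): a minimal reference count over the
 * explicitly-ordered increment/decrement variants above. Taking a reference
 * needs no ordering; dropping one uses release so writes made while the
 * reference was held are visible before the object can be reclaimed.
 *
 *   static void ref_acquire(long volatile *refs) {
 *     _InterlockedIncrement_nf(refs);
 *   }
 *   static int ref_release(long volatile *refs) {
 *     return _InterlockedDecrement_rel(refs) == 0;  // nonzero => reclaim
 *   }
 */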
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
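/*
 * Usage sketch (illustrative only; FLAG_READY is a made-up flag): the
 * bitwise intrinsics above all return the value the word held before the
 * update, which lets a reader clear a flag and learn whether it had been
 * set in one step.
 *
 *   #define FLAG_READY 0x1L
 *   static void publish_ready(long volatile *flags) {
 *     _InterlockedOr_rel(flags, FLAG_READY);  // release: prior writes first
 *   }
 *   static int consume_ready(long volatile *flags) {
 *     return (_InterlockedAnd_acq(flags, ~FLAG_READY) & FLAG_READY) != 0;
 *   }
 */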
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
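/*
 * Usage sketch (illustrative only): a bare-bones spinlock built from the
 * exchange variants above. The acquire exchange keeps the critical section
 * from floating above the lock; the release exchange publishes its writes
 * on unlock.
 *
 *   static void spin_lock(long volatile *lock) {
 *     while (_InterlockedExchange_acq(lock, 1) != 0)
 *       ;  // old value 0 means the lock was free and is now ours
 *   }
 *   static void spin_unlock(long volatile *lock) {
 *     _InterlockedExchange_rel(lock, 0);
 *   }
 */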
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                              long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                              long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                              long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELEASE);
  return _Comparand;
}
#endif
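/*
 * Usage sketch (illustrative only): the standard compare-exchange retry
 * loop, here computing an atomic maximum. Each intrinsic returns the value
 * it observed at _Destination, so a result equal to the comparand means the
 * exchange took place.
 *
 *   static long atomic_max(long volatile *dest, long val) {
 *     long old = *dest;
 *     while (old < val) {
 *       long seen = _InterlockedCompareExchange_acq(dest, val, old);
 *       if (seen == old)
 *         break;     // exchanged successfully
 *       old = seen;  // lost a race; retry against the fresh value
 *     }
 *     return old;
 *   }
 */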
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))

#ifdef __i386__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readgsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned char, __offset);
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readgsword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned short, __offset);
}
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readgsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned long, __offset);
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readgsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(256, unsigned __int64, __offset);
}
#endif
#undef __ptr_to_addr_space
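/*
 * Usage sketch (illustrative only; the 0x30 offset is a Win64 ABI detail,
 * not something this header defines): on x64 Windows, GS points at the
 * current TEB, and NT_TIB.Self at offset 0x30 holds its linear address.
 *
 *   #ifdef __x86_64__
 *   static void *current_teb(void) {
 *     return (void *)__readgsqword(0x30);
 *   }
 *   #endif
 */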
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif
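/*
 * Usage sketch (illustrative only): the string intrinsics behave like a
 * forward memset/memcpy over non-overlapping buffers, in units of the
 * suffix size (b/w/d/q).
 *
 *   static void fill_then_copy(unsigned char *dst, unsigned char *src,
 *                              size_t n) {
 *     __stosb(src, 0xAA, n);  // pattern-fill the source buffer
 *     __movsb(dst, src, n);   // copy it forward into dst
 *   }
 */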

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile ("nop");
}
#endif
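/*
 * Usage sketch (illustrative only): the documented AVX detection sequence
 * using __cpuid and _xgetbv. CPUID.1:ECX bit 27 (OSXSAVE) gates XGETBV,
 * bit 28 reports AVX, and XCR0 bits 1-2 confirm the OS saves XMM/YMM state.
 *
 *   static int os_supports_avx(void) {
 *     int info[4];
 *     __cpuid(info, 1);
 *     if ((info[2] & (1 << 27)) == 0 || (info[2] & (1 << 28)) == 0)
 *       return 0;
 *     return (_xgetbv(_XCR_XFEATURE_ENABLED_MASK) & 0x6) == 0x6;
 *   }
 */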

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
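/*
 * Usage sketch (illustrative only): RDMSR is privileged, so __readmsr only
 * works at CPL0 (e.g. inside a driver) and faults in user mode. MSR 0x10
 * is IA32_TIME_STAMP_COUNTER.
 *
 *   static unsigned __int64 tsc_via_msr(void) {
 *     return __readmsr(0x10);
 *   }
 */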

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */