/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

#if defined(__arm__)
#include <armintr.h>
#endif

#if defined(__aarch64__)
#include <arm64intr.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
static __inline__
__int64 __emul(int, int);
static __inline__
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
static __inline__
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
static __inline__
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
static __inline__
__int64 _mul128(__int64, __int64, __int64*);
static __inline__
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64*);

#endif /* __x86_64__ */

#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)

static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif

/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
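/* Usage sketch (illustrative only, not part of this header): claiming a slot
 * in a small bitmap with _bittestandset. The names `_Slots` and `claim_slot`
 * are hypothetical.
 *
 *   static long _Slots;                      // 32 single-bit slots
 *   static int claim_slot(void) {
 *     for (long __i = 0; __i < 32; ++__i)
 *       if (!_bittestandset(&_Slots, __i))   // previous bit was clear
 *         return (int)__i;                   // slot __i is now ours
 *     return -1;                             // all slots taken
 *   }
 *
 * Note these forms are not atomic; use _interlockedbittestandset (declared
 * above) when multiple threads touch the same word.
 */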
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
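/* Usage sketch (illustrative only, ARM targets where the suffixed variants
 * above exist): a shared counter bumped with the acquire variant. The
 * _acq/_nf/_rel suffixes map directly onto the __ATOMIC_ACQUIRE,
 * __ATOMIC_RELAXED, and __ATOMIC_RELEASE orderings used above; the variable
 * name `_Hits` is hypothetical.
 *
 *   static long volatile _Hits;
 *   long __prev = _InterlockedExchangeAdd_acq(&_Hits, 1);
 *   // __prev holds the counter value from before the increment.
 */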
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
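/* Usage sketch (illustrative only, ARM targets where the variants above
 * exist): a minimal test-and-set spinlock, acquiring with
 * _InterlockedExchange_acq and releasing with _InterlockedExchange_rel.
 * The names `_SpinLock`, `spin_lock`, and `spin_unlock` are hypothetical.
 *
 *   static long volatile _SpinLock;   // 0 = free, 1 = held
 *   static void spin_lock(void) {
 *     while (_InterlockedExchange_acq(&_SpinLock, 1) != 0)
 *       ;                             // spin until the previous value was 0
 *   }
 *   static void spin_unlock(void) {
 *     _InterlockedExchange_rel(&_SpinLock, 0);
 *   }
 */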
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                              long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                              long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                              long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
#endif
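/* Usage sketch (illustrative only, ARM targets where the variants above
 * exist): the canonical compare-exchange retry loop, here computing an
 * atomic maximum. As with the MSVC intrinsics, the value returned is the one
 * observed *before* the exchange; the store happened iff it equals the
 * comparand. The helper name `atomic_max` is hypothetical.
 *
 *   static long atomic_max(long volatile *_Dest, long _New) {
 *     long __old = *_Dest;
 *     while (__old < _New) {
 *       long __seen = _InterlockedCompareExchange_acq(_Dest, _New, __old);
 *       if (__seen == __old)
 *         break;              // our value was stored
 *       __old = __seen;       // somebody raced us; retry with fresh value
 *     }
 *     return __old;
 *   }
 *
 * The _rel variants above pair a release success order with a relaxed
 * failure order, since the failure ordering of a compare-exchange may not
 * be release.
 */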

/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  /* The rep-prefixed string instructions advance edi/esi and decrement ecx,
     so mark those operands read-write and clobber memory. */
  __asm__ __volatile__("rep movsb"
                       : "+D"(__dst), "+S"(__src), "+c"(__n) : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsl"
                       : "+D"(__dst), "+S"(__src), "+c"(__n) : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__ __volatile__("rep movsw"
                       : "+D"(__dst), "+S"(__src), "+c"(__n) : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__ __volatile__("rep stosl"
                       : "+D"(__dst), "+c"(__n) : "a"(__x) : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__ __volatile__("rep stosw"
                       : "+D"(__dst), "+c"(__n) : "a"(__x) : "memory");
}
#endif
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__ __volatile__("rep movsq"
                       : "+D"(__dst), "+S"(__src), "+c"(__n) : : "memory");
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__ __volatile__("rep stosq"
                       : "+D"(__dst), "+c"(__n) : "a"(__x) : "memory");
}
#endif
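/* Usage sketch (illustrative only): __stosd and __movsb behave like
 * element-wise memset and memcpy built on the rep-prefixed string
 * instructions; `__buf` and `__copy` are hypothetical locals.
 *
 *   unsigned long __buf[16];
 *   __stosd(__buf, 0xdeadbeefUL, 16);                // fill 16 dwords
 *   unsigned char __copy[sizeof(__buf)];
 *   __movsb(__copy, (unsigned char *)__buf, sizeof(__buf));
 */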

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
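/* Usage sketch (illustrative only): reading the CPU vendor string with
 * __cpuid leaf 0, which returns the string across EBX, EDX, ECX (in that
 * order). `__vendor` and `__info` are hypothetical locals.
 *
 *   int __info[4];
 *   char __vendor[13];
 *   __cpuid(__info, 0);
 *   __builtin_memcpy(__vendor + 0, &__info[1], 4);   // EBX
 *   __builtin_memcpy(__vendor + 4, &__info[3], 4);   // EDX
 *   __builtin_memcpy(__vendor + 8, &__info[2], 4);   // ECX
 *   __vendor[12] = '\0';                             // e.g. "GenuineIntel"
 */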
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
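/* Usage sketch (illustrative only): the common AVX enablement check, reading
 * XCR0 and verifying that the OS saves both XMM and YMM state (bits 1 and 2).
 * xgetbv itself requires OSXSAVE support, which should be confirmed via
 * __cpuid first.
 *
 *   unsigned __int64 __xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
 *   int __avx_usable = (__xcr0 & 0x6) == 0x6;   // XMM and YMM state enabled
 */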
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile ("nop");
}
#endif

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
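/* Usage sketch (illustrative only, requires ring 0): reading IA32_APIC_BASE
 * (MSR 0x1B); executing rdmsr outside kernel mode raises #GP. The constant
 * name below is hypothetical.
 *
 *   #define __MSR_IA32_APIC_BASE 0x1bU
 *   unsigned __int64 __apic_base = __readmsr(__MSR_IA32_APIC_BASE);
 */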

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */