/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>
#include <atomic>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                 \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
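// Illustrative sketch (hypothetical usage): CCAST strips qualifiers, RCAST
// reinterprets the bits. Given a hypothetical `volatile kmp_int32 *counter`:
//   kmp_int32 *plain = CCAST(kmp_int32 *, counter); // const_cast
//   void *raw = RCAST(void *, plain);               // reinterpret_cast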
//-------------------------------------------------------------------------
// Type traits: supply the printf specification for debug prints (d, u, lld,
// llu) and the signed/unsigned flavors of a type.
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
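// Illustrative sketch (hypothetical usage): callers combine traits_t<T>::spec
// with a "%" prefix to print a value of unknown width, and use the nested
// typedefs to flip signedness generically, e.g.
//   template <typename T>
//   typename traits_t<T>::unsigned_t as_unsigned(T v) {
//     return (typename traits_t<T>::unsigned_t)v;
//   }
// where traits_t<kmp_int64>::spec would be "lld" on a 64-bit Unix target.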
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
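// Illustrative sketch (assuming a 4096-byte page size):
//   PAGE_ALIGNED((void *)0x2000)  -> true  (low 12 bits are zero)
//   PAGE_ALIGNED((void *)0x2010)  -> false
//   ALIGN_TO_PAGE((void *)0x2010) -> (void *)0x2000 (rounds down)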

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
//   Code from libcxx/include/__config
// Use a function-like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#  define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#  define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#  define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#  define KMP_FALLTHROUGH() ((void)0)
#endif
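// Illustrative sketch (hypothetical switch): the annotation tells the
// compiler the fall through is deliberate, silencing warnings such as
// -Wimplicit-fallthrough:
//   switch (state) {
//   case 0:
//     prepare();
//     KMP_FALLTHROUGH(); // intentionally continue into case 1
//   case 1:
//     run();
//     break;
//   }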

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)            \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias     \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                    \
  __asm__(                                                                      \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t");                                        \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                 \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
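// Illustrative sketch (hypothetical API name and version tag):
//   KMP_VERSION_SYMBOL(omp_hypothetical, 10, "VER_1.0");
// roughly expands to an alias __kmp_api_omp_hypothetical_10_alias for
// __kmp_api_omp_hypothetical, plus two .symver directives: one binding the
// alias to omp_hypothetical@VER_1.0, and one making __kmp_api_omp_hypothetical
// the default definition omp_hypothetical@@VERSION.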

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN.  */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
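// Illustrative sketch (hypothetical struct): placing a hot counter on its
// own cache line to avoid false sharing between threads:
//   typedef struct example_counter {
//     KMP_ALIGN_CACHE volatile kmp_int32 value; // starts a new cache line
//   } example_counter_t;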

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

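// The next routine atomically swaps the raw 32-bit pattern of a float by
// type-punning it through InterlockedExchange; the Unix branch further below
// provides the same operation via __sync_lock_test_and_set.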
inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(
  volatile kmp_uint64 *p, kmp_uint64 cv, kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                                       __ATOMIC_SEQ_CST);
}
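// In the following routine, __atomic_compare_exchange writes the observed
// value back into cv on failure, and on success cv already holds the old
// value, so returning cv yields the previous contents either way, matching
// __sync_val_compare_and_swap semantics.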
static inline kmp_uint64 mips_sync_val_compare_and_swap(
  volatile kmp_uint64 *p, kmp_uint64 cv, kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                                __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),\
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),\
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
#endif

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||     \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a)    (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b)  (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a)    (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b)  (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))

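// Illustrative sketch (hypothetical flag): with the current plain
// definitions these compile to ordinary accesses; the FIXME above
// contemplates routing them through volatile casts instead.
//   kmp_int32 flag;            // shared between threads
//   TCW_4(flag, 1);            // today: flag = 1
//   kmp_int32 v = TCR_4(flag); // today: v = flag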
#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);
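// Illustrative note (assumption about usage elsewhere in the runtime): the
// outlined body of a parallel region is invoked through this signature,
// where gtid points to the caller's global thread id and the trailing
// varargs carry the outlined function's arguments, e.g. a hypothetical
//   void outlined_body(int *gtid, int *npr, void *shareds);
// stored and called as a microtask_t.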

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)

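// Illustrative sketch (hypothetical counter):
//   std::atomic<kmp_int32> nthreads_alive;
//   KMP_ATOMIC_ST_RLX(&nthreads_alive, 0);              // relaxed store
//   kmp_int32 prev = KMP_ATOMIC_INC(&nthreads_alive);   // fetch_add(1, acq_rel)
//   kmp_int32 now = KMP_ATOMIC_LD_ACQ(&nthreads_alive); // acquire load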
// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
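// Illustrative sketch (hypothetical single-winner claim of a slot):
//   std::atomic<kmp_int32> owner;
//   kmp_int32 my_gtid = 0; // this thread's id (assumed)
//   bool won = __kmp_atomic_compare_store(&owner, (kmp_int32)0, my_gtid);
// Unlike a raw compare_exchange_strong call, a failed attempt cannot leak
// the observed value back to the caller because "expected" is passed by
// value.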

#endif /* KMP_OS_H */
// Safe C API
#include "kmp_safe_c_api.h"