// -*- C++ -*-
//===--------------------------- atomic -----------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP_ATOMIC
#define _LIBCPP_ATOMIC

/*
    atomic synopsis

namespace std
{

// order and consistency

typedef enum memory_order
{
    memory_order_relaxed,
    memory_order_consume,  // load-consume
    memory_order_acquire,  // load-acquire
    memory_order_release,  // store-release
    memory_order_acq_rel,  // store-release load-acquire
    memory_order_seq_cst   // store-release load-acquire
} memory_order;

template <class T> T kill_dependency(T y) noexcept;

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE unspecified
#define ATOMIC_CHAR_LOCK_FREE unspecified
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
#define ATOMIC_SHORT_LOCK_FREE unspecified
#define ATOMIC_INT_LOCK_FREE unspecified
#define ATOMIC_LONG_LOCK_FREE unspecified
#define ATOMIC_LLONG_LOCK_FREE unspecified
#define ATOMIC_POINTER_LOCK_FREE unspecified

// flag type and operations

typedef struct atomic_flag
{
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
    void clear(memory_order m = memory_order_seq_cst) noexcept;
    atomic_flag()  noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
} atomic_flag;

bool
    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;

bool
    atomic_flag_test_and_set(atomic_flag* obj) noexcept;

bool
    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
                                      memory_order m) noexcept;

bool
    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;

void
    atomic_flag_clear(volatile atomic_flag* obj) noexcept;

void
    atomic_flag_clear(atomic_flag* obj) noexcept;

void
    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;

void
    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;

#define ATOMIC_FLAG_INIT see below
#define ATOMIC_VAR_INIT(value) see below

template <class T>
struct atomic
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T() const volatile noexcept;
    operator T() const noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(T desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
    T operator=(T) volatile noexcept;
    T operator=(T) noexcept;
};

template <>
struct atomic<integral>
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
    operator integral() const volatile noexcept;
    operator integral() const noexcept;
    integral exchange(integral desr,
                      memory_order m = memory_order_seq_cst) volatile noexcept;
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    integral
        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(integral desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
    integral operator=(integral desr) volatile noexcept;
    integral operator=(integral desr) noexcept;

    integral operator++(int) volatile noexcept;
    integral operator++(int) noexcept;
    integral operator--(int) volatile noexcept;
    integral operator--(int) noexcept;
    integral operator++() volatile noexcept;
    integral operator++() noexcept;
    integral operator--() volatile noexcept;
    integral operator--() noexcept;
    integral operator+=(integral op) volatile noexcept;
    integral operator+=(integral op) noexcept;
    integral operator-=(integral op) volatile noexcept;
    integral operator-=(integral op) noexcept;
    integral operator&=(integral op) volatile noexcept;
    integral operator&=(integral op) noexcept;
    integral operator|=(integral op) volatile noexcept;
    integral operator|=(integral op) noexcept;
    integral operator^=(integral op) volatile noexcept;
    integral operator^=(integral op) noexcept;
};

template <class T>
struct atomic<T*>
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T*() const volatile noexcept;
    operator T*() const noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(T* desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;
    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
    T* operator++() volatile noexcept;
    T* operator++() noexcept;
    T* operator--() volatile noexcept;
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t op) volatile noexcept;
    T* operator+=(ptrdiff_t op) noexcept;
    T* operator-=(ptrdiff_t op) volatile noexcept;
    T* operator-=(ptrdiff_t op) noexcept;
};


template <class T>
    bool
    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;

template <class T>
    bool
    atomic_is_lock_free(const atomic<T>* obj) noexcept;

template <class T>
    void
    atomic_init(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_init(atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store(atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    void
    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    T
    atomic_load(const volatile atomic<T>* obj) noexcept;

template <class T>
    T
    atomic_load(const atomic<T>* obj) noexcept;

template <class T>
    T
    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;

template <class T>
    T
    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;

template <class T>
    T
    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    T
    atomic_exchange(atomic<T>* obj, T desr) noexcept;

template <class T>
    T
    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    T
    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
                                          T desr,
                                          memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
                                          memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
                                            T* expc, T desr,
                                            memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
                                            T desr,
                                            memory_order s, memory_order f) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
                             memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
                             memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;

template <class T>
    T*
    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
                              memory_order m) noexcept;
template <class T>
    T*
    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;

template <class T>
    T*
    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
                              memory_order m) noexcept;
template <class T>
    T*
    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// fences

void atomic_thread_fence(memory_order m) noexcept;
void atomic_signal_fence(memory_order m) noexcept;

}  // std

*/
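
// Illustrative usage sketch (not part of the header itself): a shared counter
// bumped from several threads with the default seq_cst ordering described in
// the synopsis above.  The function name, thread count and loop bound below
// are hypothetical.
/*
    #include <atomic>
    #include <thread>
    #include <vector>

    int count_to_4000()
    {
        std::atomic<int> counter(0);
        std::vector<std::thread> threads;
        for (int i = 0; i < 4; ++i)
            threads.emplace_back([&counter] {
                for (int j = 0; j < 1000; ++j)
                    counter.fetch_add(1);           // atomic read-modify-write
            });
        for (auto& t : threads)
            t.join();
        return counter.load();                      // 4000 once every thread has joined
    }
*/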

#include <__config>
#include <cstddef>
#include <cstdint>
#include <type_traits>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header
#endif

#ifdef _LIBCPP_HAS_NO_THREADS
#error <atomic> is not supported on this single threaded system
#else // !_LIBCPP_HAS_NO_THREADS

_LIBCPP_BEGIN_NAMESPACE_STD

#if !__has_feature(cxx_atomic) && _GNUC_VER < 407
#error <atomic> is not implemented
#else

typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;

#if _GNUC_VER >= 407
namespace __gcc_atomic {
template <typename _Tp>
struct __gcc_atomic_t {

#if _GNUC_VER >= 501
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __gcc_atomic_t() _NOEXCEPT = default;
#else
    __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>

template <typename _Tp> _Tp __create();

template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};

static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

} // namespace __gcc_atomic

template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

template <typename _Tp>
static inline
typename enable_if<
    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
  // the default operator= in an object is not volatile, a byte-by-byte copy
  // is required.
  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
  volatile char* end = to + sizeof(_Tp);
  char* from = reinterpret_cast<char*>(&__val);
  while (to != end) {
    *to++ = *from++;
  }
}

template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}

static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  return __atomic_store(&__a->__a_value, &__val,
                        __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
                                    memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
                                        _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
                                        memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
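
// The __skip_amt scaling above makes the fetch_add/fetch_sub wrappers below
// advance a pointer by whole objects rather than by raw bytes: the delta is
// multiplied by sizeof(_Tp) for atomic<T*>.  A hedged sketch of the visible
// effect (assumes a 4-byte int; 'buf' and 'p' are hypothetical names):
/*
    int buf[8] = {0};
    std::atomic<int*> p(buf);
    p.fetch_add(2);        // p now points at buf + 2, i.e. 2 * sizeof(int) bytes later
*/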

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
#endif // _GNUC_VER >= 407

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
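
// Usage sketch (illustrative only; 'guard' is a hypothetical std::atomic<int*>):
// kill_dependency ends the dependency chain started by a memory_order_consume
// load, so later uses of the returned value need not be ordered after that load.
/*
    int* p = guard.load(std::memory_order_consume);
    int  v = std::kill_dependency(p != nullptr ? *p : 0);
*/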

// general atomic<T>

template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    mutable _Atomic(_Tp) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
#if __has_feature(cxx_atomic)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
    }
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
};
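
// Illustrative compare-and-swap retry loop (sketch only): compare_exchange_weak
// may fail spuriously, so it is normally called in a loop; on failure the value
// actually observed is written back into 'expected' and the operation retries.
/*
    std::atomic<int> x(1);
    int expected = x.load(std::memory_order_relaxed);
    while (!x.compare_exchange_weak(expected, expected * 2))
        ;   // 'expected' now holds the freshly observed value; loop tries again
*/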

// atomic<Integral>

template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};

// atomic<T>

template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
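
// Illustrative sketch: the primary template also covers user-defined trivially
// copyable types (the 'Point' type below is hypothetical); compare_exchange
// then compares the whole object representation.
/*
    struct Point { int x; int y; };
    std::atomic<Point> p(Point{0, 0});
    Point expected = p.load();
    p.compare_exchange_strong(expected, Point{1, 1});
*/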

// atomic<T*>

template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};

// atomic_is_lock_free

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

// atomic_exchange

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
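
// Illustrative sketch of the free-function (C-compatible) form: unlike the
// member functions, 'expected' is passed by pointer.  The values and memory
// orders below are arbitrary.
/*
    std::atomic<long> a(10);
    long expected = 10;
    bool ok = std::atomic_compare_exchange_strong_explicit(
        &a, &expected, 11L, std::memory_order_acq_rel, std::memory_order_relaxed);
*/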
1307
1308// atomic_fetch_add
1309
1310template <class _Tp>
1311inline _LIBCPP_INLINE_VISIBILITY
1312typename enable_if
1313<
1314    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1315    _Tp
1316>::type
1317atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1318{
1319    return __o->fetch_add(__op);
1320}
1321
1322template <class _Tp>
1323inline _LIBCPP_INLINE_VISIBILITY
1324typename enable_if
1325<
1326    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1327    _Tp
1328>::type
1329atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1330{
1331    return __o->fetch_add(__op);
1332}
1333
1334template <class _Tp>
1335inline _LIBCPP_INLINE_VISIBILITY
1336_Tp*
1337atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1338{
1339    return __o->fetch_add(__op);
1340}
1341
1342template <class _Tp>
1343inline _LIBCPP_INLINE_VISIBILITY
1344_Tp*
1345atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1346{
1347    return __o->fetch_add(__op);
1348}
1349
1350// atomic_fetch_add_explicit
1351
1352template <class _Tp>
1353inline _LIBCPP_INLINE_VISIBILITY
1354typename enable_if
1355<
1356    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1357    _Tp
1358>::type
1359atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1360{
1361    return __o->fetch_add(__op, __m);
1362}
1363
1364template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// atomic_fetch_and

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

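// Illustrative sketch (not part of the header proper): the free
// atomic_fetch_* functions above are thin wrappers that forward to the
// corresponding member functions, so the following calls are assumed to be
// equivalent from user code:
//
//     std::atomic<int> __counter(0);
//     std::atomic_fetch_add(&__counter, 1);               // __counter.fetch_add(1)
//     std::atomic_fetch_add_explicit(&__counter, 1,
//                                    std::memory_order_relaxed);
//                                                          // __counter.fetch_add(1, memory_order_relaxed)
//
// The integral overloads are SFINAE-constrained to reject bool, and the
// pointer specializations take a ptrdiff_t offset rather than a value of the
// pointed-to type.
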
// flag type and operations

typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

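// Illustrative sketch (assumption, not part of the header): atomic_flag is
// the minimal lock-free primitive and is commonly used to build a spinlock,
// e.g. with a hypothetical __guard flag:
//
//     std::atomic_flag __guard = ATOMIC_FLAG_INIT;
//
//     void __lock()   { while (__guard.test_and_set(std::memory_order_acquire)) {} }
//     void __unlock() { __guard.clear(std::memory_order_release); }
//
// test_and_set is an atomic exchange with true (an acquire RMW here) and
// clear is an atomic store of false with release ordering, so writes made
// inside the critical section are visible to the next thread that acquires
// the lock.
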
// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}

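// Illustrative sketch (assumption): atomic_thread_fence pairs with relaxed
// atomic operations to provide release/acquire ordering that is not attached
// to any single operation, e.g. with int __data and std::atomic<bool> __ready:
//
//     // thread 1
//     __data = 42;
//     std::atomic_thread_fence(std::memory_order_release);
//     __ready.store(true, std::memory_order_relaxed);
//
//     // thread 2
//     while (!__ready.load(std::memory_order_relaxed)) {}
//     std::atomic_thread_fence(std::memory_order_acquire);
//     assert(__data == 42);   // the fences synchronize, so the store is visible
//
// atomic_signal_fence establishes the same compiler-level ordering, but only
// between a thread and a signal handler executing in that thread; it emits no
// hardware fence instruction.
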
// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

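// Illustrative sketch (assumption): the macros above provide C11-compatible
// constant initialization, e.g.
//
//     std::atomic<int>  __count = ATOMIC_VAR_INIT(0);   // expands to {0}
//     std::atomic_flag  __flag  = ATOMIC_FLAG_INIT;     // expands to {false}
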
// lock-free property

#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE

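// Illustrative note (assumption): each *_LOCK_FREE macro expands to 0 (never
// lock-free), 1 (sometimes lock-free) or 2 (always lock-free), so the property
// can be tested at preprocessing time, e.g.
//
//     #if ATOMIC_INT_LOCK_FREE == 2
//     // std::atomic<int> never falls back to a lock on this target
//     #endif
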
#endif  //  !__has_feature(cxx_atomic)

_LIBCPP_END_NAMESPACE_STD

#endif  // !_LIBCPP_HAS_NO_THREADS

#endif  // _LIBCPP_ATOMIC
