1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// order and consistency
21
22typedef enum memory_order
23{
24    memory_order_relaxed,
25    memory_order_consume,  // load-consume
26    memory_order_acquire,  // load-acquire
27    memory_order_release,  // store-release
28    memory_order_acq_rel,  // store-release load-acquire
    memory_order_seq_cst   // store-release load-acquire, plus a single total order
30} memory_order;
31
32template <class T> T kill_dependency(T y) noexcept;
33
34// lock-free property
35
36#define ATOMIC_BOOL_LOCK_FREE unspecified
37#define ATOMIC_CHAR_LOCK_FREE unspecified
38#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41#define ATOMIC_SHORT_LOCK_FREE unspecified
42#define ATOMIC_INT_LOCK_FREE unspecified
43#define ATOMIC_LONG_LOCK_FREE unspecified
44#define ATOMIC_LLONG_LOCK_FREE unspecified
45#define ATOMIC_POINTER_LOCK_FREE unspecified
46
47// flag type and operations
48
49typedef struct atomic_flag
50{
51    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54    void clear(memory_order m = memory_order_seq_cst) noexcept;
55    atomic_flag()  noexcept = default;
56    atomic_flag(const atomic_flag&) = delete;
57    atomic_flag& operator=(const atomic_flag&) = delete;
58    atomic_flag& operator=(const atomic_flag&) volatile = delete;
59} atomic_flag;
60
61bool
62    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
63
64bool
65    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
66
67bool
68    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69                                      memory_order m) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
73
74void
75    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
76
77void
78    atomic_flag_clear(atomic_flag* obj) noexcept;
79
80void
81    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
82
83void
84    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
85
86#define ATOMIC_FLAG_INIT see below
87#define ATOMIC_VAR_INIT(value) see below
88
89template <class T>
90struct atomic
91{
92    bool is_lock_free() const volatile noexcept;
93    bool is_lock_free() const noexcept;
94    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97    T load(memory_order m = memory_order_seq_cst) const noexcept;
98    operator T() const volatile noexcept;
99    operator T() const noexcept;
100    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102    bool compare_exchange_weak(T& expc, T desr,
103                               memory_order s, memory_order f) volatile noexcept;
104    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105    bool compare_exchange_strong(T& expc, T desr,
106                                 memory_order s, memory_order f) volatile noexcept;
107    bool compare_exchange_strong(T& expc, T desr,
108                                 memory_order s, memory_order f) noexcept;
109    bool compare_exchange_weak(T& expc, T desr,
110                               memory_order m = memory_order_seq_cst) volatile noexcept;
111    bool compare_exchange_weak(T& expc, T desr,
112                               memory_order m = memory_order_seq_cst) noexcept;
113    bool compare_exchange_strong(T& expc, T desr,
114                                memory_order m = memory_order_seq_cst) volatile noexcept;
115    bool compare_exchange_strong(T& expc, T desr,
116                                 memory_order m = memory_order_seq_cst) noexcept;
117
118    atomic() noexcept = default;
119    constexpr atomic(T desr) noexcept;
120    atomic(const atomic&) = delete;
121    atomic& operator=(const atomic&) = delete;
122    atomic& operator=(const atomic&) volatile = delete;
123    T operator=(T) volatile noexcept;
124    T operator=(T) noexcept;
125};
126
127template <>
128struct atomic<integral>
129{
130    bool is_lock_free() const volatile noexcept;
131    bool is_lock_free() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135    integral load(memory_order m = memory_order_seq_cst) const noexcept;
136    operator integral() const volatile noexcept;
137    operator integral() const noexcept;
138    integral exchange(integral desr,
139                      memory_order m = memory_order_seq_cst) volatile noexcept;
140    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141    bool compare_exchange_weak(integral& expc, integral desr,
142                               memory_order s, memory_order f) volatile noexcept;
143    bool compare_exchange_weak(integral& expc, integral desr,
144                               memory_order s, memory_order f) noexcept;
145    bool compare_exchange_strong(integral& expc, integral desr,
146                                 memory_order s, memory_order f) volatile noexcept;
147    bool compare_exchange_strong(integral& expc, integral desr,
148                                 memory_order s, memory_order f) noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order m = memory_order_seq_cst) volatile noexcept;
151    bool compare_exchange_weak(integral& expc, integral desr,
152                               memory_order m = memory_order_seq_cst) noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                memory_order m = memory_order_seq_cst) volatile noexcept;
155    bool compare_exchange_strong(integral& expc, integral desr,
156                                 memory_order m = memory_order_seq_cst) noexcept;
157
158    integral
159        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral
162        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
164    integral
165        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
173
174    atomic() noexcept = default;
175    constexpr atomic(integral desr) noexcept;
176    atomic(const atomic&) = delete;
177    atomic& operator=(const atomic&) = delete;
178    atomic& operator=(const atomic&) volatile = delete;
179    integral operator=(integral desr) volatile noexcept;
180    integral operator=(integral desr) noexcept;
181
182    integral operator++(int) volatile noexcept;
183    integral operator++(int) noexcept;
184    integral operator--(int) volatile noexcept;
185    integral operator--(int) noexcept;
186    integral operator++() volatile noexcept;
187    integral operator++() noexcept;
188    integral operator--() volatile noexcept;
189    integral operator--() noexcept;
190    integral operator+=(integral op) volatile noexcept;
191    integral operator+=(integral op) noexcept;
192    integral operator-=(integral op) volatile noexcept;
193    integral operator-=(integral op) noexcept;
194    integral operator&=(integral op) volatile noexcept;
195    integral operator&=(integral op) noexcept;
196    integral operator|=(integral op) volatile noexcept;
197    integral operator|=(integral op) noexcept;
198    integral operator^=(integral op) volatile noexcept;
199    integral operator^=(integral op) noexcept;
200};
201
202template <class T>
203struct atomic<T*>
204{
205    bool is_lock_free() const volatile noexcept;
206    bool is_lock_free() const noexcept;
207    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210    T* load(memory_order m = memory_order_seq_cst) const noexcept;
211    operator T*() const volatile noexcept;
212    operator T*() const noexcept;
213    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215    bool compare_exchange_weak(T*& expc, T* desr,
216                               memory_order s, memory_order f) volatile noexcept;
217    bool compare_exchange_weak(T*& expc, T* desr,
218                               memory_order s, memory_order f) noexcept;
219    bool compare_exchange_strong(T*& expc, T* desr,
220                                 memory_order s, memory_order f) volatile noexcept;
221    bool compare_exchange_strong(T*& expc, T* desr,
222                                 memory_order s, memory_order f) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order m = memory_order_seq_cst) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order m = memory_order_seq_cst) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                memory_order m = memory_order_seq_cst) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order m = memory_order_seq_cst) noexcept;
231    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
235
236    atomic() noexcept = default;
237    constexpr atomic(T* desr) noexcept;
238    atomic(const atomic&) = delete;
239    atomic& operator=(const atomic&) = delete;
240    atomic& operator=(const atomic&) volatile = delete;
241
242    T* operator=(T*) volatile noexcept;
243    T* operator=(T*) noexcept;
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256};
257
258
259template <class T>
260    bool
261    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
262
263template <class T>
264    bool
265    atomic_is_lock_free(const atomic<T>* obj) noexcept;
266
267template <class T>
268    void
269    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
270
271template <class T>
272    void
273    atomic_init(atomic<T>* obj, T desr) noexcept;
274
275template <class T>
276    void
277    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
278
279template <class T>
280    void
281    atomic_store(atomic<T>* obj, T desr) noexcept;
282
283template <class T>
284    void
285    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
286
287template <class T>
288    void
289    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
290
291template <class T>
292    T
293    atomic_load(const volatile atomic<T>* obj) noexcept;
294
295template <class T>
296    T
297    atomic_load(const atomic<T>* obj) noexcept;
298
299template <class T>
300    T
301    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
302
303template <class T>
304    T
305    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
306
307template <class T>
308    T
309    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
310
311template <class T>
312    T
313    atomic_exchange(atomic<T>* obj, T desr) noexcept;
314
315template <class T>
316    T
317    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
318
319template <class T>
320    T
321    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
322
323template <class T>
324    bool
325    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
326
327template <class T>
328    bool
329    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
330
331template <class T>
332    bool
333    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
334
335template <class T>
336    bool
337    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
338
339template <class T>
340    bool
341    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
342                                          T desr,
343                                          memory_order s, memory_order f) noexcept;
344
345template <class T>
346    bool
347    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348                                          memory_order s, memory_order f) noexcept;
349
350template <class T>
351    bool
352    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
353                                            T* expc, T desr,
354                                            memory_order s, memory_order f) noexcept;
355
356template <class T>
357    bool
358    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
359                                            T desr,
360                                            memory_order s, memory_order f) noexcept;
361
362template <class Integral>
363    Integral
364    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
365
366template <class Integral>
367    Integral
368    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
369
370template <class Integral>
371    Integral
372    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373                              memory_order m) noexcept;
374template <class Integral>
375    Integral
376    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377                              memory_order m) noexcept;
378template <class Integral>
379    Integral
380    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
381
382template <class Integral>
383    Integral
384    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
385
386template <class Integral>
387    Integral
388    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389                              memory_order m) noexcept;
390template <class Integral>
391    Integral
392    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393                              memory_order m) noexcept;
394template <class Integral>
395    Integral
396    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
397
398template <class Integral>
399    Integral
400    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
401
402template <class Integral>
403    Integral
404    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405                              memory_order m) noexcept;
406template <class Integral>
407    Integral
408    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409                              memory_order m) noexcept;
410template <class Integral>
411    Integral
412    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
413
414template <class Integral>
415    Integral
416    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
417
418template <class Integral>
419    Integral
420    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421                             memory_order m) noexcept;
422template <class Integral>
423    Integral
424    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425                             memory_order m) noexcept;
426template <class Integral>
427    Integral
428    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
429
430template <class Integral>
431    Integral
432    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
433
434template <class Integral>
435    Integral
436    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437                              memory_order m) noexcept;
438template <class Integral>
439    Integral
440    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441                              memory_order m) noexcept;
442
443template <class T>
444    T*
445    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
446
447template <class T>
448    T*
449    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
450
451template <class T>
452    T*
453    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454                              memory_order m) noexcept;
455template <class T>
456    T*
457    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
458
459template <class T>
460    T*
461    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
462
463template <class T>
464    T*
465    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
466
467template <class T>
468    T*
469    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470                              memory_order m) noexcept;
471template <class T>
472    T*
473    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
474
475// Atomics for standard typedef types
476
477typedef atomic<bool>               atomic_bool;
478typedef atomic<char>               atomic_char;
479typedef atomic<signed char>        atomic_schar;
480typedef atomic<unsigned char>      atomic_uchar;
481typedef atomic<short>              atomic_short;
482typedef atomic<unsigned short>     atomic_ushort;
483typedef atomic<int>                atomic_int;
484typedef atomic<unsigned int>       atomic_uint;
485typedef atomic<long>               atomic_long;
486typedef atomic<unsigned long>      atomic_ulong;
487typedef atomic<long long>          atomic_llong;
488typedef atomic<unsigned long long> atomic_ullong;
489typedef atomic<char16_t>           atomic_char16_t;
490typedef atomic<char32_t>           atomic_char32_t;
491typedef atomic<wchar_t>            atomic_wchar_t;
492
493typedef atomic<int_least8_t>   atomic_int_least8_t;
494typedef atomic<uint_least8_t>  atomic_uint_least8_t;
495typedef atomic<int_least16_t>  atomic_int_least16_t;
496typedef atomic<uint_least16_t> atomic_uint_least16_t;
497typedef atomic<int_least32_t>  atomic_int_least32_t;
498typedef atomic<uint_least32_t> atomic_uint_least32_t;
499typedef atomic<int_least64_t>  atomic_int_least64_t;
500typedef atomic<uint_least64_t> atomic_uint_least64_t;
501
502typedef atomic<int_fast8_t>   atomic_int_fast8_t;
503typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
504typedef atomic<int_fast16_t>  atomic_int_fast16_t;
505typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506typedef atomic<int_fast32_t>  atomic_int_fast32_t;
507typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508typedef atomic<int_fast64_t>  atomic_int_fast64_t;
509typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
510
511typedef atomic<intptr_t>  atomic_intptr_t;
512typedef atomic<uintptr_t> atomic_uintptr_t;
513typedef atomic<size_t>    atomic_size_t;
514typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515typedef atomic<intmax_t>  atomic_intmax_t;
516typedef atomic<uintmax_t> atomic_uintmax_t;
517
518// fences
519
520void atomic_thread_fence(memory_order m) noexcept;
521void atomic_signal_fence(memory_order m) noexcept;
522
523}  // std
524
525*/
526
527#include <__config>
528#include <cstddef>
529#include <cstdint>
530#include <type_traits>
531
532#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533#pragma GCC system_header
534#endif
535
536#ifdef _LIBCPP_HAS_NO_THREADS
537#error <atomic> is not supported on this single threaded system
538#endif
539#if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
540#error <atomic> is not implemented
541#endif
542
543_LIBCPP_BEGIN_NAMESPACE_STD
544
// C++11 [atomics.order]: the six memory orderings. Declared as a typedef'd
// plain enum (not enum class) to match the C-compatible form in the synopsis.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
550
551#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
552namespace __gcc_atomic {
// Storage wrapper that emulates the C11 _Atomic(T) type for the GCC-builtin
// backend; the __atomic_* builtins operate directly on the __a_value member.
template <typename _Tp>
struct __gcc_atomic_t {

#if _GNUC_VER >= 501
    // GCC implements is_trivially_copyable only from 5.1; older GCCs skip
    // this diagnostic rather than failing on the missing trait.
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __gcc_atomic_t() _NOEXCEPT = default;
#else
    __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
// Spell _Atomic(T) the way the C11 backend does, so the rest of this header
// is written against a single _Atomic(x) notation regardless of backend.
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
572
573template <typename _Tp> _Tp __create();
574
575template <typename _Tp, typename _Td>
576typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
577    __test_atomic_assignable(int);
578template <typename _Tp, typename _Up>
579__two __test_atomic_assignable(...);
580
581template <typename _Tp, typename _Td>
582struct __can_assign {
583  static const bool value =
584      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
585};
586
587static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
588  // Avoid switch statement to make this a constexpr.
589  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
590         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
591          (__order == memory_order_release ? __ATOMIC_RELEASE:
592           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
593            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
594              __ATOMIC_CONSUME))));
595}
596
597static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
598  // Avoid switch statement to make this a constexpr.
599  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
600         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
601          (__order == memory_order_release ? __ATOMIC_RELAXED:
602           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
603            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
604              __ATOMIC_CONSUME))));
605}
606
607} // namespace __gcc_atomic
608
// __c11_atomic_init (volatile, directly-assignable case): selected via SFINAE
// when the payload can be assigned through a volatile lvalue (e.g. scalars).
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
616
617template <typename _Tp>
618static inline
619typename enable_if<
620    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
621     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
622__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
623  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
624  // the default operator= in an object is not volatile, a byte-by-byte copy
625  // is required.
626  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
627  volatile char* end = to + sizeof(_Tp);
628  char* from = reinterpret_cast<char*>(&__val);
629  while (to != end) {
630    *to++ = *from++;
631  }
632}
633
// __c11_atomic_init (non-volatile): plain assignment is always well-formed
// here, so no SFINAE dispatch is needed.
template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
638
// Full thread fence: forwards to the GCC builtin with the translated order.
static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}

// Compiler-only fence: constrains reordering with respect to signal handlers
// running on the same thread; emits no hardware barrier.
static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}
646
647template <typename _Tp>
648static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
649                                      memory_order __order) {
650  return __atomic_store(&__a->__a_value, &__val,
651                        __gcc_atomic::__to_gcc_order(__order));
652}
653
654template <typename _Tp>
655static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
656                                      memory_order __order) {
657  __atomic_store(&__a->__a_value, &__val,
658                 __gcc_atomic::__to_gcc_order(__order));
659}
660
661template <typename _Tp>
662static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
663                                    memory_order __order) {
664  _Tp __ret;
665  __atomic_load(&__a->__a_value, &__ret,
666                __gcc_atomic::__to_gcc_order(__order));
667  return __ret;
668}
669
670template <typename _Tp>
671static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
672  _Tp __ret;
673  __atomic_load(&__a->__a_value, &__ret,
674                __gcc_atomic::__to_gcc_order(__order));
675  return __ret;
676}
677
678template <typename _Tp>
679static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
680                                        _Tp __value, memory_order __order) {
681  _Tp __ret;
682  __atomic_exchange(&__a->__a_value, &__value, &__ret,
683                    __gcc_atomic::__to_gcc_order(__order));
684  return __ret;
685}
686
687template <typename _Tp>
688static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
689                                        memory_order __order) {
690  _Tp __ret;
691  __atomic_exchange(&__a->__a_value, &__value, &__ret,
692                    __gcc_atomic::__to_gcc_order(__order));
693  return __ret;
694}
695
// Strong compare-exchange (volatile): the 'false' literal selects the strong
// flavour of the builtin (weak = false); failure order is laundered through
// __to_gcc_failure_order since release/acq_rel are invalid on failure.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Strong compare-exchange (non-volatile).
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
715
// Weak compare-exchange (volatile): 'true' requests the weak flavour of the
// builtin, which is permitted to fail spuriously and so is only suitable
// inside retry loops.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Weak compare-exchange (non-volatile).
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
735
// Scaling factor applied to fetch_add/fetch_sub deltas: 1 for non-pointer
// types, sizeof(_Tp) when the atomic holds a _Tp*, so pointer atomics advance
// by whole objects while the builtin works in raw units.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations have no 'value', so any use fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
748
// fetch_add (volatile): __delta is scaled by __skip_amt<_Tp>::value so that
// pointer atomics step by whole objects; returns the pre-operation value.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

// fetch_add (non-volatile).
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
762
// fetch_sub (volatile): mirror of fetch_add, with the same __skip_amt
// scaling for pointer arithmetic; returns the pre-operation value.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

// fetch_sub (non-volatile).
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
776
777template <typename _Tp>
778static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
779                                         _Tp __pattern, memory_order __order) {
780  return __atomic_fetch_and(&__a->__a_value, __pattern,
781                            __gcc_atomic::__to_gcc_order(__order));
782}
783
784template <typename _Tp>
785static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
786                                         _Tp __pattern, memory_order __order) {
787  return __atomic_fetch_and(&__a->__a_value, __pattern,
788                            __gcc_atomic::__to_gcc_order(__order));
789}
790
791template <typename _Tp>
792static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
793                                        _Tp __pattern, memory_order __order) {
794  return __atomic_fetch_or(&__a->__a_value, __pattern,
795                           __gcc_atomic::__to_gcc_order(__order));
796}
797
798template <typename _Tp>
799static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
800                                        memory_order __order) {
801  return __atomic_fetch_or(&__a->__a_value, __pattern,
802                           __gcc_atomic::__to_gcc_order(__order));
803}
804
805template <typename _Tp>
806static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
807                                         _Tp __pattern, memory_order __order) {
808  return __atomic_fetch_xor(&__a->__a_value, __pattern,
809                            __gcc_atomic::__to_gcc_order(__order));
810}
811
812template <typename _Tp>
813static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
814                                         memory_order __order) {
815  return __atomic_fetch_xor(&__a->__a_value, __pattern,
816                            __gcc_atomic::__to_gcc_order(__order));
817}
818#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
819
// [atomics.order] std::kill_dependency: terminates a memory_order_consume
// dependency chain by passing the value through an ordinary function call.
// The value itself is returned unchanged.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
827
828// general atomic<T>
829
// Implementation detail behind std::atomic<_Tp>.  The bool template
// parameter selects the integral specialization below; this primary
// template (the "false" case) provides only the operations valid for every
// atomic type: lock-free query, load, store, exchange and compare-exchange,
// each as a volatile and a non-volatile overload.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // mutable: const-qualified members (load, operator _Tp) must pass
    // &__a_ to intrinsics that take a pointer to non-const _Atomic storage.
    mutable _Atomic(_Tp) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
// With the C11 intrinsics the query is by size only; the GCC builtin
// additionally takes an object pointer (0 here = "typical alignment").
#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
    }
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    // Implicit conversion performs a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    // Four-argument compare-exchange: separate success (__s) and failure
    // (__f) orderings.  On failure __e is updated with the observed value.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Three-argument forms use __m for both the success and failure order.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // Default constructor leaves __a_ uninitialized when defaulted members
    // are available (per the standard; initialize via ATOMIC_VAR_INIT or
    // the value constructor); otherwise __a_ is value-initialized.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    // Atomics are neither copyable nor copy-assignable.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
};
922
923// atomic<Integral>
924
// Specialization for integral types other than bool: layers the
// fetch-and-modify operations and the arithmetic/bitwise operator sugar on
// top of the general base above.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // fetch_* atomically apply the operation and return the PREVIOUS value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-inc/dec return the old value; pre-inc/dec and the compound
    // assignments return the new value, recomputed from the fetched old
    // value (e.g. fetch_add(__op) + __op) rather than re-reading __a_.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
1003
1004// atomic<T>
1005
// Primary std::atomic template.  All operations are inherited from
// __atomic_base; this layer only adds assignment from _Tp, which performs a
// seq_cst store and returns the stored value __d (by value, not a
// reference, so no extra atomic load is needed).
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
1023
1024// atomic<T*>
1025
// Partial specialization for pointer types: adds fetch_add/fetch_sub taking
// a ptrdiff_t (arithmetic is in units of whole _Tp objects) plus the
// corresponding increment/decrement and compound-assignment operators.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // Atomically add/subtract __op elements; returns the PREVIOUS pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Post-forms return the old pointer; pre-forms and compound assignment
    // return the updated pointer, derived from the fetched old value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
1083
1084// atomic_is_lock_free
1085
// C-compatible non-member query: forwards to atomic<_Tp>::is_lock_free().
// Volatile and non-volatile overloads.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}
1101
1102// atomic_init
1103
// Non-atomic initialization of *__o to __d (per [atomics.types.operations],
// atomic_init is NOT an atomic operation; it must not race with other
// accesses).  Forwards to the __c11_atomic_init intrinsic/shim.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}
1119
1120// atomic_store
1121
// Non-member store with the member default ordering (seq_cst).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}
1137
1138// atomic_store_explicit
1139
// Non-member store with caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}
1155
1156// atomic_load
1157
// Non-member load with the member default ordering (seq_cst).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}
1173
1174// atomic_load_explicit
1175
// Non-member load with caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}
1191
1192// atomic_exchange
1193
// Non-member exchange (seq_cst): stores __d and returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}
1209
1210// atomic_exchange_explicit
1211
// Non-member exchange with caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
1227
1228// atomic_compare_exchange_weak
1229
// Non-member weak CAS (seq_cst for both success and failure).  On failure,
// *__e is updated with the value actually observed.  May fail spuriously.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}
1245
1246// atomic_compare_exchange_strong
1247
// Non-member strong CAS (seq_cst for both success and failure).  On
// failure, *__e is updated with the value actually observed.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
1263
1264// atomic_compare_exchange_weak_explicit
1265
// Non-member weak CAS with separate success (__s) and failure (__f) orders.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}
1284
1285// atomic_compare_exchange_strong_explicit
1286
// Non-member strong CAS with separate success (__s) and failure (__f)
// orders.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
1306
1307// atomic_fetch_add
1308
// Non-member fetch_add (seq_cst); returns the previous value.  The
// enable_if-constrained pair covers integral types (bool is excluded, as it
// has no fetch_add); the _Tp* pair covers atomic pointers, advancing by
// __op whole objects.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
1348
1349// atomic_fetch_add_explicit
1350
// Non-member fetch_add with caller-supplied memory order __m; same overload
// structure as atomic_fetch_add above (integral pair + pointer pair).
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
1391
1392// atomic_fetch_sub
1393
// Non-member fetch_sub (seq_cst); returns the previous value.  Integral
// pair (bool excluded) + pointer pair, mirroring atomic_fetch_add.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
1433
1434// atomic_fetch_sub_explicit
1435
// Non-member fetch_sub with caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
1476
1477// atomic_fetch_and
1478
// Non-member fetch_and (seq_cst); integral types only (no pointer form, as
// bitwise AND is meaningless for pointers).  Returns the previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}
1502
1503// atomic_fetch_and_explicit
1504
// Non-member fetch_and with caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
1528
1529// atomic_fetch_or
1530
// Non-member fetch_or (seq_cst); integral types only.  Returns the
// previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}
1554
1555// atomic_fetch_or_explicit
1556
// Non-member fetch_or with caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
1580
1581// atomic_fetch_xor
1582
// Non-member fetch_xor (seq_cst); integral types only.  Returns the
// previous value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}
1606
1607// atomic_fetch_xor_explicit
1608
// Non-member fetch_xor with caller-supplied memory order __m.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
1632
1633// flag type and operations
1634
// std::atomic_flag: an atomic boolean flag.  test_and_set atomically sets
// the flag to true and returns the value it held before (implemented as an
// atomic exchange); clear atomically stores false.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    // NOTE(review): this bool constructor is not in the synopsis at the top
    // of this header -- it appears to be a libc++ extension; confirm before
    // relying on it in portable code.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

    // atomic_flag is neither copyable nor copy-assignable.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
1673
// C-compatible wrappers for atomic_flag::test_and_set: the plain forms use
// the member default ordering (seq_cst); the _explicit forms take __m.
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}
1701
// C-compatible wrappers for atomic_flag::clear: the plain forms use the
// member default ordering (seq_cst); the _explicit forms take __m.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
1729
1730// fences
1731
// [atomics.fences] atomic_thread_fence establishes inter-thread
// synchronization according to __m without an associated atomic object;
// atomic_signal_fence orders only between a thread and a signal handler
// executed in that thread.  Both forward to compiler intrinsics/shims.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}
1745
// Atomics for standard typedef types
//
// Convenience aliases required by the <atomic> synopsis: one atomic<T>
// typedef for each builtin integral/character type and for each <cstdint>
// typedef.  These are plain typedefs, so atomic_int and atomic<int> name
// the same type.

// Builtin integral and character types.
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// <cstdint> least-width integer types.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// <cstdint> fastest-width integer types.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Pointer-sized and maximum-width integer types.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
1788
// Initializes an atomic_flag to the clear (false) state, e.g.
//   atomic_flag __f = ATOMIC_FLAG_INIT;
#define ATOMIC_FLAG_INIT {false}
// Static initializer for an atomic object, e.g.
//   atomic<int> __a = ATOMIC_VAR_INIT(5);
#define ATOMIC_VAR_INIT(__v) {__v}

// Lock-free property macros: forward the compiler's predefined
// __GCC_ATOMIC_*_LOCK_FREE values (0 = never lock-free, 1 = sometimes
// lock-free, 2 = always lock-free).
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
1802
1803_LIBCPP_END_NAMESPACE_STD
1804
1805#endif  // _LIBCPP_ATOMIC
1806