// libc++ <atomic> header, LLVM revision 278724
1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// order and consistency
21
22typedef enum memory_order
23{
24    memory_order_relaxed,
25    memory_order_consume,  // load-consume
26    memory_order_acquire,  // load-acquire
27    memory_order_release,  // store-release
28    memory_order_acq_rel,  // store-release load-acquire
29    memory_order_seq_cst   // store-release load-acquire
30} memory_order;
31
32template <class T> T kill_dependency(T y) noexcept;
33
34// lock-free property
35
36#define ATOMIC_BOOL_LOCK_FREE unspecified
37#define ATOMIC_CHAR_LOCK_FREE unspecified
38#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41#define ATOMIC_SHORT_LOCK_FREE unspecified
42#define ATOMIC_INT_LOCK_FREE unspecified
43#define ATOMIC_LONG_LOCK_FREE unspecified
44#define ATOMIC_LLONG_LOCK_FREE unspecified
45#define ATOMIC_POINTER_LOCK_FREE unspecified
46
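Each ATOMIC_..._LOCK_FREE macro expands to 0, 1, or 2, meaning the
corresponding type is never, sometimes, or always lock-free.
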
47// flag type and operations
48
49typedef struct atomic_flag
50{
51    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54    void clear(memory_order m = memory_order_seq_cst) noexcept;
55    atomic_flag()  noexcept = default;
56    atomic_flag(const atomic_flag&) = delete;
57    atomic_flag& operator=(const atomic_flag&) = delete;
58    atomic_flag& operator=(const atomic_flag&) volatile = delete;
59} atomic_flag;
60
61bool
62    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
63
64bool
65    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
66
67bool
68    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69                                      memory_order m) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
73
74void
75    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
76
77void
78    atomic_flag_clear(atomic_flag* obj) noexcept;
79
80void
81    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
82
83void
84    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
85
86#define ATOMIC_FLAG_INIT see below
87#define ATOMIC_VAR_INIT(value) see below
88
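Illustrative example (not part of the synopsis): atomic_flag is the one type
guaranteed to be lock-free, which makes it the natural building block for a
minimal spin lock; the names spin_flag, lock() and unlock() below are only
for exposition:

    std::atomic_flag spin_flag = ATOMIC_FLAG_INIT;

    void lock()   { while (spin_flag.test_and_set(std::memory_order_acquire)) {} }
    void unlock() { spin_flag.clear(std::memory_order_release); }
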
89template <class T>
90struct atomic
91{
92    bool is_lock_free() const volatile noexcept;
93    bool is_lock_free() const noexcept;
94    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97    T load(memory_order m = memory_order_seq_cst) const noexcept;
98    operator T() const volatile noexcept;
99    operator T() const noexcept;
100    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102    bool compare_exchange_weak(T& expc, T desr,
103                               memory_order s, memory_order f) volatile noexcept;
104    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105    bool compare_exchange_strong(T& expc, T desr,
106                                 memory_order s, memory_order f) volatile noexcept;
107    bool compare_exchange_strong(T& expc, T desr,
108                                 memory_order s, memory_order f) noexcept;
109    bool compare_exchange_weak(T& expc, T desr,
110                               memory_order m = memory_order_seq_cst) volatile noexcept;
111    bool compare_exchange_weak(T& expc, T desr,
112                               memory_order m = memory_order_seq_cst) noexcept;
113    bool compare_exchange_strong(T& expc, T desr,
114                                memory_order m = memory_order_seq_cst) volatile noexcept;
115    bool compare_exchange_strong(T& expc, T desr,
116                                 memory_order m = memory_order_seq_cst) noexcept;
117
118    atomic() noexcept = default;
119    constexpr atomic(T desr) noexcept;
120    atomic(const atomic&) = delete;
121    atomic& operator=(const atomic&) = delete;
122    atomic& operator=(const atomic&) volatile = delete;
123    T operator=(T) volatile noexcept;
124    T operator=(T) noexcept;
125};
126
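Illustrative example (not part of the synopsis): compare_exchange_weak is
normally driven from a retry loop; on failure it writes the freshly observed
value back into the expected argument, so no separate reload is needed:

    std::atomic<int> counter{0};

    int expected = counter.load(std::memory_order_relaxed);
    while (!counter.compare_exchange_weak(expected, expected + 1,
                                          std::memory_order_acq_rel,
                                          std::memory_order_relaxed))
        ;   // expected now holds the current value; retry with it
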
127template <>
128struct atomic<integral>
129{
130    bool is_lock_free() const volatile noexcept;
131    bool is_lock_free() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135    integral load(memory_order m = memory_order_seq_cst) const noexcept;
136    operator integral() const volatile noexcept;
137    operator integral() const noexcept;
138    integral exchange(integral desr,
139                      memory_order m = memory_order_seq_cst) volatile noexcept;
140    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141    bool compare_exchange_weak(integral& expc, integral desr,
142                               memory_order s, memory_order f) volatile noexcept;
143    bool compare_exchange_weak(integral& expc, integral desr,
144                               memory_order s, memory_order f) noexcept;
145    bool compare_exchange_strong(integral& expc, integral desr,
146                                 memory_order s, memory_order f) volatile noexcept;
147    bool compare_exchange_strong(integral& expc, integral desr,
148                                 memory_order s, memory_order f) noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order m = memory_order_seq_cst) volatile noexcept;
151    bool compare_exchange_weak(integral& expc, integral desr,
152                               memory_order m = memory_order_seq_cst) noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                memory_order m = memory_order_seq_cst) volatile noexcept;
155    bool compare_exchange_strong(integral& expc, integral desr,
156                                 memory_order m = memory_order_seq_cst) noexcept;
157
158    integral
159        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral
162        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
164    integral
165        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
173
174    atomic() noexcept = default;
175    constexpr atomic(integral desr) noexcept;
176    atomic(const atomic&) = delete;
177    atomic& operator=(const atomic&) = delete;
178    atomic& operator=(const atomic&) volatile = delete;
179    integral operator=(integral desr) volatile noexcept;
180    integral operator=(integral desr) noexcept;
181
182    integral operator++(int) volatile noexcept;
183    integral operator++(int) noexcept;
184    integral operator--(int) volatile noexcept;
185    integral operator--(int) noexcept;
186    integral operator++() volatile noexcept;
187    integral operator++() noexcept;
188    integral operator--() volatile noexcept;
189    integral operator--() noexcept;
190    integral operator+=(integral op) volatile noexcept;
191    integral operator+=(integral op) noexcept;
192    integral operator-=(integral op) volatile noexcept;
193    integral operator-=(integral op) noexcept;
194    integral operator&=(integral op) volatile noexcept;
195    integral operator&=(integral op) noexcept;
196    integral operator|=(integral op) volatile noexcept;
197    integral operator|=(integral op) noexcept;
198    integral operator^=(integral op) volatile noexcept;
199    integral operator^=(integral op) noexcept;
200};
201
202template <class T>
203struct atomic<T*>
204{
205    bool is_lock_free() const volatile noexcept;
206    bool is_lock_free() const noexcept;
207    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210    T* load(memory_order m = memory_order_seq_cst) const noexcept;
211    operator T*() const volatile noexcept;
212    operator T*() const noexcept;
213    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215    bool compare_exchange_weak(T*& expc, T* desr,
216                               memory_order s, memory_order f) volatile noexcept;
217    bool compare_exchange_weak(T*& expc, T* desr,
218                               memory_order s, memory_order f) noexcept;
219    bool compare_exchange_strong(T*& expc, T* desr,
220                                 memory_order s, memory_order f) volatile noexcept;
221    bool compare_exchange_strong(T*& expc, T* desr,
222                                 memory_order s, memory_order f) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order m = memory_order_seq_cst) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order m = memory_order_seq_cst) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                memory_order m = memory_order_seq_cst) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order m = memory_order_seq_cst) noexcept;
231    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
235
236    atomic() noexcept = default;
237    constexpr atomic(T* desr) noexcept;
238    atomic(const atomic&) = delete;
239    atomic& operator=(const atomic&) = delete;
240    atomic& operator=(const atomic&) volatile = delete;
241
242    T* operator=(T*) volatile noexcept;
243    T* operator=(T*) noexcept;
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256};
257
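Illustrative example (not part of the synopsis): the atomic<T*> arithmetic
operations take their ptrdiff_t argument in elements, matching ordinary
pointer arithmetic:

    int buf[8] = {};
    std::atomic<int*> p{buf};
    int* old = p.fetch_add(2);   // old == buf, p.load() == buf + 2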
258
259template <class T>
260    bool
261    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
262
263template <class T>
264    bool
265    atomic_is_lock_free(const atomic<T>* obj) noexcept;
266
267template <class T>
268    void
269    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
270
271template <class T>
272    void
273    atomic_init(atomic<T>* obj, T desr) noexcept;
274
275template <class T>
276    void
277    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
278
279template <class T>
280    void
281    atomic_store(atomic<T>* obj, T desr) noexcept;
282
283template <class T>
284    void
285    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
286
287template <class T>
288    void
289    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
290
291template <class T>
292    T
293    atomic_load(const volatile atomic<T>* obj) noexcept;
294
295template <class T>
296    T
297    atomic_load(const atomic<T>* obj) noexcept;
298
299template <class T>
300    T
301    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
302
303template <class T>
304    T
305    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
306
307template <class T>
308    T
309    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
310
311template <class T>
312    T
313    atomic_exchange(atomic<T>* obj, T desr) noexcept;
314
315template <class T>
316    T
317    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
318
319template <class T>
320    T
321    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
322
323template <class T>
324    bool
325    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
326
327template <class T>
328    bool
329    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
330
331template <class T>
332    bool
333    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
334
335template <class T>
336    bool
337    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
338
339template <class T>
340    bool
341    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
342                                          T desr,
343                                          memory_order s, memory_order f) noexcept;
344
345template <class T>
346    bool
347    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348                                          memory_order s, memory_order f) noexcept;
349
350template <class T>
351    bool
352    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
353                                            T* expc, T desr,
354                                            memory_order s, memory_order f) noexcept;
355
356template <class T>
357    bool
358    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
359                                            T desr,
360                                            memory_order s, memory_order f) noexcept;
361
362template <class Integral>
363    Integral
364    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
365
366template <class Integral>
367    Integral
368    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
369
370template <class Integral>
371    Integral
372    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373                              memory_order m) noexcept;
374template <class Integral>
375    Integral
376    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377                              memory_order m) noexcept;
378template <class Integral>
379    Integral
380    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
381
382template <class Integral>
383    Integral
384    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
385
386template <class Integral>
387    Integral
388    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389                              memory_order m) noexcept;
390template <class Integral>
391    Integral
392    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393                              memory_order m) noexcept;
394template <class Integral>
395    Integral
396    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
397
398template <class Integral>
399    Integral
400    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
401
402template <class Integral>
403    Integral
404    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405                              memory_order m) noexcept;
406template <class Integral>
407    Integral
408    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409                              memory_order m) noexcept;
410template <class Integral>
411    Integral
412    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
413
414template <class Integral>
415    Integral
416    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
417
418template <class Integral>
419    Integral
420    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421                             memory_order m) noexcept;
422template <class Integral>
423    Integral
424    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425                             memory_order m) noexcept;
426template <class Integral>
427    Integral
428    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
429
430template <class Integral>
431    Integral
432    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
433
434template <class Integral>
435    Integral
436    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437                              memory_order m) noexcept;
438template <class Integral>
439    Integral
440    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441                              memory_order m) noexcept;
442
443template <class T>
444    T*
445    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
446
447template <class T>
448    T*
449    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
450
451template <class T>
452    T*
453    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454                              memory_order m) noexcept;
455template <class T>
456    T*
457    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
458
459template <class T>
460    T*
461    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
462
463template <class T>
464    T*
465    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
466
467template <class T>
468    T*
469    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470                              memory_order m) noexcept;
471template <class T>
472    T*
473    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
474
475// Atomics for standard typedef types
476
477typedef atomic<bool>               atomic_bool;
478typedef atomic<char>               atomic_char;
479typedef atomic<signed char>        atomic_schar;
480typedef atomic<unsigned char>      atomic_uchar;
481typedef atomic<short>              atomic_short;
482typedef atomic<unsigned short>     atomic_ushort;
483typedef atomic<int>                atomic_int;
484typedef atomic<unsigned int>       atomic_uint;
485typedef atomic<long>               atomic_long;
486typedef atomic<unsigned long>      atomic_ulong;
487typedef atomic<long long>          atomic_llong;
488typedef atomic<unsigned long long> atomic_ullong;
489typedef atomic<char16_t>           atomic_char16_t;
490typedef atomic<char32_t>           atomic_char32_t;
491typedef atomic<wchar_t>            atomic_wchar_t;
492
493typedef atomic<int_least8_t>   atomic_int_least8_t;
494typedef atomic<uint_least8_t>  atomic_uint_least8_t;
495typedef atomic<int_least16_t>  atomic_int_least16_t;
496typedef atomic<uint_least16_t> atomic_uint_least16_t;
497typedef atomic<int_least32_t>  atomic_int_least32_t;
498typedef atomic<uint_least32_t> atomic_uint_least32_t;
499typedef atomic<int_least64_t>  atomic_int_least64_t;
500typedef atomic<uint_least64_t> atomic_uint_least64_t;
501
502typedef atomic<int_fast8_t>   atomic_int_fast8_t;
503typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
504typedef atomic<int_fast16_t>  atomic_int_fast16_t;
505typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506typedef atomic<int_fast32_t>  atomic_int_fast32_t;
507typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508typedef atomic<int_fast64_t>  atomic_int_fast64_t;
509typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
510
511typedef atomic<intptr_t>  atomic_intptr_t;
512typedef atomic<uintptr_t> atomic_uintptr_t;
513typedef atomic<size_t>    atomic_size_t;
514typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515typedef atomic<intmax_t>  atomic_intmax_t;
516typedef atomic<uintmax_t> atomic_uintmax_t;
517
518// fences
519
520void atomic_thread_fence(memory_order m) noexcept;
521void atomic_signal_fence(memory_order m) noexcept;
522
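Illustrative example (not part of the synopsis): a release fence paired with
an acquire fence lets otherwise-relaxed operations synchronize; the names
data and flag are only for exposition:

    int data = 0;
    std::atomic<int> flag{0};

    // thread 1
    data = 42;
    std::atomic_thread_fence(std::memory_order_release);
    flag.store(1, std::memory_order_relaxed);

    // thread 2
    if (flag.load(std::memory_order_relaxed) == 1) {
        std::atomic_thread_fence(std::memory_order_acquire);
        assert(data == 42);   // the fences order the write before this read
    }
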
523}  // std
524
525*/
526
527#include <__config>
528#include <cstddef>
529#include <cstdint>
530#include <type_traits>
531
532#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533#pragma GCC system_header
534#endif
535
536#ifdef _LIBCPP_HAS_NO_THREADS
537#error <atomic> is not supported on this single threaded system
538#else // !_LIBCPP_HAS_NO_THREADS
539
540_LIBCPP_BEGIN_NAMESPACE_STD
541
542#if !__has_feature(cxx_atomic) && _GNUC_VER < 407
543#error <atomic> is not implemented
544#else
545
546typedef enum memory_order
547{
548    memory_order_relaxed, memory_order_consume, memory_order_acquire,
549    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
550} memory_order;
551
552#if _GNUC_VER >= 407
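// When this header is compiled with GCC (4.7 or later) instead of Clang, the
// Clang __c11_atomic_* builtins and the _Atomic keyword are not available.
// The layer below emulates them: _Atomic(x) becomes a small wrapper struct,
// and each __c11_atomic_* operation used by the classes further down is
// forwarded to the corresponding GCC __atomic_* builtin, with memory_order
// values translated to __ATOMIC_* constants by __to_gcc_order.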
553namespace __gcc_atomic {
554template <typename T>
555struct __gcc_atomic_t {
556  __gcc_atomic_t() _NOEXCEPT {}
557  explicit __gcc_atomic_t(T value) _NOEXCEPT : __a_value(value) {}
558  T __a_value;
559};
560#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
561
562template <typename T> T __create();
563
564template <typename __Tp, typename __Td>
565typename enable_if<sizeof(__Tp()->__a_value = __create<__Td>()), char>::type
566    __test_atomic_assignable(int);
567template <typename T, typename U>
568__two __test_atomic_assignable(...);
569
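// __can_assign<__Tp, __Td>::value is true when a __Td can be assigned directly
// to the __a_value member reached through a pointer of type __Tp (in
// particular through a pointer-to-volatile); __c11_atomic_init below uses it
// to choose between plain assignment and a byte-by-byte copy.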
570template <typename __Tp, typename __Td>
571struct __can_assign {
572  static const bool value =
573      sizeof(__test_atomic_assignable<__Tp, __Td>(1)) == sizeof(char);
574};
575
576static inline constexpr int __to_gcc_order(memory_order __order) {
577  // Avoid switch statement to make this a constexpr.
578  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
579         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
580          (__order == memory_order_release ? __ATOMIC_RELEASE:
581           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
582            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
583              __ATOMIC_CONSUME))));
584}
585
586} // namespace __gcc_atomic
587
588template <typename _Tp>
589static inline
590typename enable_if<
591    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
592__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
593  __a->__a_value = __val;
594}
595
596template <typename _Tp>
597static inline
598typename enable_if<
599    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
600     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
601__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
602  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
603  // the default operator= in an object is not volatile, a byte-by-byte copy
604  // is required.
605  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
606  volatile char* end = to + sizeof(_Tp);
607  char* from = reinterpret_cast<char*>(&__val);
608  while (to != end) {
609    *to++ = *from++;
610  }
611}
612
613template <typename _Tp>
614static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
615  __a->__a_value = __val;
616}
617
618static inline void __c11_atomic_thread_fence(memory_order __order) {
619  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
620}
621
622static inline void __c11_atomic_signal_fence(memory_order __order) {
623  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
624}
625
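// The second argument to __atomic_is_lock_free is an optional object address
// used to account for alignment; passing 0 asks about an object of the given
// size with typical alignment.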
626static inline bool __c11_atomic_is_lock_free(size_t __size) {
627  return __atomic_is_lock_free(__size, 0);
628}
629
630template <typename _Tp>
631static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
632                                      memory_order __order) {
633  return __atomic_store(&__a->__a_value, &__val,
634                        __gcc_atomic::__to_gcc_order(__order));
635}
636
637template <typename _Tp>
638static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
639                                      memory_order __order) {
640  return __atomic_store(&__a->__a_value, &__val,
641                        __gcc_atomic::__to_gcc_order(__order));
642}
643
644template <typename _Tp>
645static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
646                                    memory_order __order) {
647  _Tp __ret;
648  __atomic_load(&__a->__a_value, &__ret,
649                __gcc_atomic::__to_gcc_order(__order));
650  return __ret;
651}
652
653template <typename _Tp>
654static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
655  _Tp __ret;
656  __atomic_load(&__a->__a_value, &__ret,
657                __gcc_atomic::__to_gcc_order(__order));
658  return __ret;
659}
660
661template <typename _Tp>
662static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
663                                        _Tp __value, memory_order __order) {
664  _Tp __ret;
665  __atomic_exchange(&__a->__a_value, &__value, &__ret,
666                    __gcc_atomic::__to_gcc_order(__order));
667  return __ret;
668}
669
670template <typename _Tp>
671static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
672                                        memory_order __order) {
673  _Tp __ret;
674  __atomic_exchange(&__a->__a_value, &__value, &__ret,
675                    __gcc_atomic::__to_gcc_order(__order));
676  return __ret;
677}
678
679template <typename _Tp>
680static inline bool __c11_atomic_compare_exchange_strong(
681    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
682    memory_order __success, memory_order __failure) {
683  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
684                                   false,
685                                   __gcc_atomic::__to_gcc_order(__success),
686                                   __gcc_atomic::__to_gcc_order(__failure));
687}
688
689template <typename _Tp>
690static inline bool __c11_atomic_compare_exchange_strong(
691    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
692    memory_order __failure) {
693  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
694                                   false,
695                                   __gcc_atomic::__to_gcc_order(__success),
696                                   __gcc_atomic::__to_gcc_order(__failure));
697}
698
699template <typename _Tp>
700static inline bool __c11_atomic_compare_exchange_weak(
701    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
702    memory_order __success, memory_order __failure) {
703  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
704                                   true,
705                                   __gcc_atomic::__to_gcc_order(__success),
706                                   __gcc_atomic::__to_gcc_order(__failure));
707}
708
709template <typename _Tp>
710static inline bool __c11_atomic_compare_exchange_weak(
711    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
712    memory_order __failure) {
713  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
714                                   true,
715                                   __gcc_atomic::__to_gcc_order(__success),
716                                   __gcc_atomic::__to_gcc_order(__failure));
717}
718
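// __skip_amt<_Tp>::value is the factor by which fetch_add/fetch_sub deltas
// are scaled: 1 for integral types, sizeof(pointee) for pointer types. The
// GCC builtins below operate on the raw pointer value without scaling, while
// the ptrdiff_t delta is given in elements, so the conversion happens here.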
719template <typename _Tp>
720struct __skip_amt { enum {value = 1}; };
721
722template <typename _Tp>
723struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
724
725// FIXME: Haven't figured out what the spec says about using arrays with
726// atomic_fetch_add. Force a failure rather than creating bad behavior.
727template <typename _Tp>
728struct __skip_amt<_Tp[]> { };
729template <typename _Tp, int n>
730struct __skip_amt<_Tp[n]> { };
731
732template <typename _Tp, typename _Td>
733static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
734                                         _Td __delta, memory_order __order) {
735  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
736                            __gcc_atomic::__to_gcc_order(__order));
737}
738
739template <typename _Tp, typename _Td>
740static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
741                                         memory_order __order) {
742  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
743                            __gcc_atomic::__to_gcc_order(__order));
744}
745
746template <typename _Tp, typename _Td>
747static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
748                                         _Td __delta, memory_order __order) {
749  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
750                            __gcc_atomic::__to_gcc_order(__order));
751}
752
753template <typename _Tp, typename _Td>
754static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
755                                         memory_order __order) {
756  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
757                            __gcc_atomic::__to_gcc_order(__order));
758}
759
760template <typename _Tp>
761static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
762                                         _Tp __pattern, memory_order __order) {
763  return __atomic_fetch_and(&__a->__a_value, __pattern,
764                            __gcc_atomic::__to_gcc_order(__order));
765}
766
767template <typename _Tp>
768static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
769                                         _Tp __pattern, memory_order __order) {
770  return __atomic_fetch_and(&__a->__a_value, __pattern,
771                            __gcc_atomic::__to_gcc_order(__order));
772}
773
774template <typename _Tp>
775static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
776                                        _Tp __pattern, memory_order __order) {
777  return __atomic_fetch_or(&__a->__a_value, __pattern,
778                           __gcc_atomic::__to_gcc_order(__order));
779}
780
781template <typename _Tp>
782static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
783                                        memory_order __order) {
784  return __atomic_fetch_or(&__a->__a_value, __pattern,
785                           __gcc_atomic::__to_gcc_order(__order));
786}
787
788template <typename _Tp>
789static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
790                                         _Tp __pattern, memory_order __order) {
791  return __atomic_fetch_xor(&__a->__a_value, __pattern,
792                            __gcc_atomic::__to_gcc_order(__order));
793}
794
795template <typename _Tp>
796static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
797                                         memory_order __order) {
798  return __atomic_fetch_xor(&__a->__a_value, __pattern,
799                            __gcc_atomic::__to_gcc_order(__order));
800}
801#endif // _GNUC_VER >= 407
802
803template <class _Tp>
804inline _LIBCPP_INLINE_VISIBILITY
805_Tp
806kill_dependency(_Tp __y) _NOEXCEPT
807{
808    return __y;
809}
810
811// general atomic<T>
812
813template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
814struct __atomic_base  // false
815{
816    mutable _Atomic(_Tp) __a_;
817
818    _LIBCPP_INLINE_VISIBILITY
819    bool is_lock_free() const volatile _NOEXCEPT
820        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
821    _LIBCPP_INLINE_VISIBILITY
822    bool is_lock_free() const _NOEXCEPT
823        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
824    _LIBCPP_INLINE_VISIBILITY
825    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
826        {__c11_atomic_store(&__a_, __d, __m);}
827    _LIBCPP_INLINE_VISIBILITY
828    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
829        {__c11_atomic_store(&__a_, __d, __m);}
830    _LIBCPP_INLINE_VISIBILITY
831    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
832        {return __c11_atomic_load(&__a_, __m);}
833    _LIBCPP_INLINE_VISIBILITY
834    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
835        {return __c11_atomic_load(&__a_, __m);}
836    _LIBCPP_INLINE_VISIBILITY
837    operator _Tp() const volatile _NOEXCEPT {return load();}
838    _LIBCPP_INLINE_VISIBILITY
839    operator _Tp() const _NOEXCEPT          {return load();}
840    _LIBCPP_INLINE_VISIBILITY
841    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
842        {return __c11_atomic_exchange(&__a_, __d, __m);}
843    _LIBCPP_INLINE_VISIBILITY
844    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
845        {return __c11_atomic_exchange(&__a_, __d, __m);}
846    _LIBCPP_INLINE_VISIBILITY
847    bool compare_exchange_weak(_Tp& __e, _Tp __d,
848                               memory_order __s, memory_order __f) volatile _NOEXCEPT
849        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
850    _LIBCPP_INLINE_VISIBILITY
851    bool compare_exchange_weak(_Tp& __e, _Tp __d,
852                               memory_order __s, memory_order __f) _NOEXCEPT
853        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
854    _LIBCPP_INLINE_VISIBILITY
855    bool compare_exchange_strong(_Tp& __e, _Tp __d,
856                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
857        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
858    _LIBCPP_INLINE_VISIBILITY
859    bool compare_exchange_strong(_Tp& __e, _Tp __d,
860                                 memory_order __s, memory_order __f) _NOEXCEPT
861        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
862    _LIBCPP_INLINE_VISIBILITY
863    bool compare_exchange_weak(_Tp& __e, _Tp __d,
864                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
865        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
866    _LIBCPP_INLINE_VISIBILITY
867    bool compare_exchange_weak(_Tp& __e, _Tp __d,
868                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
869        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
870    _LIBCPP_INLINE_VISIBILITY
871    bool compare_exchange_strong(_Tp& __e, _Tp __d,
872                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
873        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
874    _LIBCPP_INLINE_VISIBILITY
875    bool compare_exchange_strong(_Tp& __e, _Tp __d,
876                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
877        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
878
879    _LIBCPP_INLINE_VISIBILITY
880#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
881    __atomic_base() _NOEXCEPT = default;
882#else
883    __atomic_base() _NOEXCEPT : __a_() {}
884#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
885
886    _LIBCPP_INLINE_VISIBILITY
887    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
888#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
889    __atomic_base(const __atomic_base&) = delete;
890    __atomic_base& operator=(const __atomic_base&) = delete;
891    __atomic_base& operator=(const __atomic_base&) volatile = delete;
892#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
893private:
894    __atomic_base(const __atomic_base&);
895    __atomic_base& operator=(const __atomic_base&);
896    __atomic_base& operator=(const __atomic_base&) volatile;
897#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
898};
899
900// atomic<Integral>
901
902template <class _Tp>
903struct __atomic_base<_Tp, true>
904    : public __atomic_base<_Tp, false>
905{
906    typedef __atomic_base<_Tp, false> __base;
907    _LIBCPP_INLINE_VISIBILITY
908    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
909    _LIBCPP_INLINE_VISIBILITY
910    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
911
912    _LIBCPP_INLINE_VISIBILITY
913    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
914        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
915    _LIBCPP_INLINE_VISIBILITY
916    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
917        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
918    _LIBCPP_INLINE_VISIBILITY
919    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
920        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
921    _LIBCPP_INLINE_VISIBILITY
922    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
923        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
924    _LIBCPP_INLINE_VISIBILITY
925    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
926        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
927    _LIBCPP_INLINE_VISIBILITY
928    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
929        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
930    _LIBCPP_INLINE_VISIBILITY
931    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
932        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
933    _LIBCPP_INLINE_VISIBILITY
934    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
935        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
936    _LIBCPP_INLINE_VISIBILITY
937    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
938        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
939    _LIBCPP_INLINE_VISIBILITY
940    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
941        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
942
943    _LIBCPP_INLINE_VISIBILITY
944    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
945    _LIBCPP_INLINE_VISIBILITY
946    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
947    _LIBCPP_INLINE_VISIBILITY
948    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
949    _LIBCPP_INLINE_VISIBILITY
950    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
951    _LIBCPP_INLINE_VISIBILITY
952    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
953    _LIBCPP_INLINE_VISIBILITY
954    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
955    _LIBCPP_INLINE_VISIBILITY
956    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
957    _LIBCPP_INLINE_VISIBILITY
958    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
959    _LIBCPP_INLINE_VISIBILITY
960    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
961    _LIBCPP_INLINE_VISIBILITY
962    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
963    _LIBCPP_INLINE_VISIBILITY
964    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
965    _LIBCPP_INLINE_VISIBILITY
966    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
967    _LIBCPP_INLINE_VISIBILITY
968    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
969    _LIBCPP_INLINE_VISIBILITY
970    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
971    _LIBCPP_INLINE_VISIBILITY
972    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
973    _LIBCPP_INLINE_VISIBILITY
974    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
975    _LIBCPP_INLINE_VISIBILITY
976    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
977    _LIBCPP_INLINE_VISIBILITY
978    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
979};
980
981// atomic<T>
982
983template <class _Tp>
984struct atomic
985    : public __atomic_base<_Tp>
986{
987    typedef __atomic_base<_Tp> __base;
988    _LIBCPP_INLINE_VISIBILITY
989    atomic() _NOEXCEPT _LIBCPP_DEFAULT
990    _LIBCPP_INLINE_VISIBILITY
991    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
992
993    _LIBCPP_INLINE_VISIBILITY
994    _Tp operator=(_Tp __d) volatile _NOEXCEPT
995        {__base::store(__d); return __d;}
996    _LIBCPP_INLINE_VISIBILITY
997    _Tp operator=(_Tp __d) _NOEXCEPT
998        {__base::store(__d); return __d;}
999};
1000
1001// atomic<T*>
1002
1003template <class _Tp>
1004struct atomic<_Tp*>
1005    : public __atomic_base<_Tp*>
1006{
1007    typedef __atomic_base<_Tp*> __base;
1008    _LIBCPP_INLINE_VISIBILITY
1009    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1010    _LIBCPP_INLINE_VISIBILITY
1011    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1012
1013    _LIBCPP_INLINE_VISIBILITY
1014    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1015        {__base::store(__d); return __d;}
1016    _LIBCPP_INLINE_VISIBILITY
1017    _Tp* operator=(_Tp* __d) _NOEXCEPT
1018        {__base::store(__d); return __d;}
1019
1020    _LIBCPP_INLINE_VISIBILITY
1021    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1022                                                                        volatile _NOEXCEPT
1023        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1024    _LIBCPP_INLINE_VISIBILITY
1025    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1026        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1027    _LIBCPP_INLINE_VISIBILITY
1028    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1029                                                                        volatile _NOEXCEPT
1030        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1031    _LIBCPP_INLINE_VISIBILITY
1032    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1033        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1034
1035    _LIBCPP_INLINE_VISIBILITY
1036    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1037    _LIBCPP_INLINE_VISIBILITY
1038    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1039    _LIBCPP_INLINE_VISIBILITY
1040    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1041    _LIBCPP_INLINE_VISIBILITY
1042    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1043    _LIBCPP_INLINE_VISIBILITY
1044    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1045    _LIBCPP_INLINE_VISIBILITY
1046    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1047    _LIBCPP_INLINE_VISIBILITY
1048    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1049    _LIBCPP_INLINE_VISIBILITY
1050    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1051    _LIBCPP_INLINE_VISIBILITY
1052    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1053    _LIBCPP_INLINE_VISIBILITY
1054    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1055    _LIBCPP_INLINE_VISIBILITY
1056    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1057    _LIBCPP_INLINE_VISIBILITY
1058    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1059};
1060
1061// atomic_is_lock_free
1062
1063template <class _Tp>
1064inline _LIBCPP_INLINE_VISIBILITY
1065bool
1066atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1067{
1068    return __o->is_lock_free();
1069}
1070
1071template <class _Tp>
1072inline _LIBCPP_INLINE_VISIBILITY
1073bool
1074atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1075{
1076    return __o->is_lock_free();
1077}
1078
1079// atomic_init
1080
1081template <class _Tp>
1082inline _LIBCPP_INLINE_VISIBILITY
1083void
1084atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1085{
1086    __c11_atomic_init(&__o->__a_, __d);
1087}
1088
1089template <class _Tp>
1090inline _LIBCPP_INLINE_VISIBILITY
1091void
1092atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1093{
1094    __c11_atomic_init(&__o->__a_, __d);
1095}
1096
1097// atomic_store
1098
1099template <class _Tp>
1100inline _LIBCPP_INLINE_VISIBILITY
1101void
1102atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1103{
1104    __o->store(__d);
1105}
1106
1107template <class _Tp>
1108inline _LIBCPP_INLINE_VISIBILITY
1109void
1110atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1111{
1112    __o->store(__d);
1113}
1114
1115// atomic_store_explicit
1116
1117template <class _Tp>
1118inline _LIBCPP_INLINE_VISIBILITY
1119void
1120atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1121{
1122    __o->store(__d, __m);
1123}
1124
1125template <class _Tp>
1126inline _LIBCPP_INLINE_VISIBILITY
1127void
1128atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1129{
1130    __o->store(__d, __m);
1131}
1132
1133// atomic_load
1134
1135template <class _Tp>
1136inline _LIBCPP_INLINE_VISIBILITY
1137_Tp
1138atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1139{
1140    return __o->load();
1141}
1142
1143template <class _Tp>
1144inline _LIBCPP_INLINE_VISIBILITY
1145_Tp
1146atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1147{
1148    return __o->load();
1149}
1150
1151// atomic_load_explicit
1152
1153template <class _Tp>
1154inline _LIBCPP_INLINE_VISIBILITY
1155_Tp
1156atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1157{
1158    return __o->load(__m);
1159}
1160
1161template <class _Tp>
1162inline _LIBCPP_INLINE_VISIBILITY
1163_Tp
1164atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1165{
1166    return __o->load(__m);
1167}
1168
1169// atomic_exchange
1170
1171template <class _Tp>
1172inline _LIBCPP_INLINE_VISIBILITY
1173_Tp
1174atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1175{
1176    return __o->exchange(__d);
1177}
1178
1179template <class _Tp>
1180inline _LIBCPP_INLINE_VISIBILITY
1181_Tp
1182atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1183{
1184    return __o->exchange(__d);
1185}
1186
1187// atomic_exchange_explicit
1188
1189template <class _Tp>
1190inline _LIBCPP_INLINE_VISIBILITY
1191_Tp
1192atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1193{
1194    return __o->exchange(__d, __m);
1195}
1196
1197template <class _Tp>
1198inline _LIBCPP_INLINE_VISIBILITY
1199_Tp
1200atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1201{
1202    return __o->exchange(__d, __m);
1203}
1204
1205// atomic_compare_exchange_weak
1206
1207template <class _Tp>
1208inline _LIBCPP_INLINE_VISIBILITY
1209bool
1210atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1211{
1212    return __o->compare_exchange_weak(*__e, __d);
1213}
1214
1215template <class _Tp>
1216inline _LIBCPP_INLINE_VISIBILITY
1217bool
1218atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1219{
1220    return __o->compare_exchange_weak(*__e, __d);
1221}
1222
1223// atomic_compare_exchange_strong
1224
1225template <class _Tp>
1226inline _LIBCPP_INLINE_VISIBILITY
1227bool
1228atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1229{
1230    return __o->compare_exchange_strong(*__e, __d);
1231}
1232
1233template <class _Tp>
1234inline _LIBCPP_INLINE_VISIBILITY
1235bool
1236atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1237{
1238    return __o->compare_exchange_strong(*__e, __d);
1239}
1240
1241// atomic_compare_exchange_weak_explicit
1242
1243template <class _Tp>
1244inline _LIBCPP_INLINE_VISIBILITY
1245bool
1246atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1247                                      _Tp __d,
1248                                      memory_order __s, memory_order __f) _NOEXCEPT
1249{
1250    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1251}
1252
1253template <class _Tp>
1254inline _LIBCPP_INLINE_VISIBILITY
1255bool
1256atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1257                                      memory_order __s, memory_order __f) _NOEXCEPT
1258{
1259    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1260}
1261
1262// atomic_compare_exchange_strong_explicit
1263
1264template <class _Tp>
1265inline _LIBCPP_INLINE_VISIBILITY
1266bool
1267atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1268                                        _Tp* __e, _Tp __d,
1269                                        memory_order __s, memory_order __f) _NOEXCEPT
1270{
1271    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1272}
1273
1274template <class _Tp>
1275inline _LIBCPP_INLINE_VISIBILITY
1276bool
1277atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1278                                        _Tp __d,
1279                                        memory_order __s, memory_order __f) _NOEXCEPT
1280{
1281    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1282}
1283
1284// atomic_fetch_add
1285
1286template <class _Tp>
1287inline _LIBCPP_INLINE_VISIBILITY
1288typename enable_if
1289<
1290    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1291    _Tp
1292>::type
1293atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1294{
1295    return __o->fetch_add(__op);
1296}
1297
1298template <class _Tp>
1299inline _LIBCPP_INLINE_VISIBILITY
1300typename enable_if
1301<
1302    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1303    _Tp
1304>::type
1305atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1306{
1307    return __o->fetch_add(__op);
1308}
1309
1310template <class _Tp>
1311inline _LIBCPP_INLINE_VISIBILITY
1312_Tp*
1313atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1314{
1315    return __o->fetch_add(__op);
1316}
1317
1318template <class _Tp>
1319inline _LIBCPP_INLINE_VISIBILITY
1320_Tp*
1321atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1322{
1323    return __o->fetch_add(__op);
1324}
1325
1326// atomic_fetch_add_explicit
1327
1328template <class _Tp>
1329inline _LIBCPP_INLINE_VISIBILITY
1330typename enable_if
1331<
1332    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1333    _Tp
1334>::type
1335atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1336{
1337    return __o->fetch_add(__op, __m);
1338}
1339
1340template <class _Tp>
1341inline _LIBCPP_INLINE_VISIBILITY
1342typename enable_if
1343<
1344    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1345    _Tp
1346>::type
1347atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1348{
1349    return __o->fetch_add(__op, __m);
1350}
1351
1352template <class _Tp>
1353inline _LIBCPP_INLINE_VISIBILITY
1354_Tp*
1355atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1356                          memory_order __m) _NOEXCEPT
1357{
1358    return __o->fetch_add(__op, __m);
1359}
1360
1361template <class _Tp>
1362inline _LIBCPP_INLINE_VISIBILITY
1363_Tp*
1364atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1365{
1366    return __o->fetch_add(__op, __m);
1367}
1368
1369// atomic_fetch_sub
1370
1371template <class _Tp>
1372inline _LIBCPP_INLINE_VISIBILITY
1373typename enable_if
1374<
1375    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1376    _Tp
1377>::type
1378atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1379{
1380    return __o->fetch_sub(__op);
1381}
1382
1383template <class _Tp>
1384inline _LIBCPP_INLINE_VISIBILITY
1385typename enable_if
1386<
1387    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1388    _Tp
1389>::type
1390atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1391{
1392    return __o->fetch_sub(__op);
1393}
1394
1395template <class _Tp>
1396inline _LIBCPP_INLINE_VISIBILITY
1397_Tp*
1398atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1399{
1400    return __o->fetch_sub(__op);
1401}
1402
1403template <class _Tp>
1404inline _LIBCPP_INLINE_VISIBILITY
1405_Tp*
1406atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1407{
1408    return __o->fetch_sub(__op);
1409}
1410
1411// atomic_fetch_sub_explicit
1412
1413template <class _Tp>
1414inline _LIBCPP_INLINE_VISIBILITY
1415typename enable_if
1416<
1417    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1418    _Tp
1419>::type
1420atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1421{
1422    return __o->fetch_sub(__op, __m);
1423}
1424
1425template <class _Tp>
1426inline _LIBCPP_INLINE_VISIBILITY
1427typename enable_if
1428<
1429    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1430    _Tp
1431>::type
1432atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1433{
1434    return __o->fetch_sub(__op, __m);
1435}
1436
1437template <class _Tp>
1438inline _LIBCPP_INLINE_VISIBILITY
1439_Tp*
1440atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
1441                          memory_order __m) _NOEXCEPT
1442{
1443    return __o->fetch_sub(__op, __m);
1444}
1445
1446template <class _Tp>
1447inline _LIBCPP_INLINE_VISIBILITY
1448_Tp*
1449atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1450{
1451    return __o->fetch_sub(__op, __m);
1452}

// atomic_fetch_and

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
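
// Illustrative sketch, not part of the header: the bitwise fetch operations
// update flag masks atomically and, like the arithmetic ones, return the
// previous value. The mask values below are example-only.
//
//     std::atomic<unsigned> flags(0x0Fu);
//     std::atomic_fetch_or(&flags, 0x10u);     // set a bit
//     std::atomic_fetch_and(&flags, ~0x01u);   // clear a bit
//     std::atomic_fetch_xor_explicit(&flags, 0x02u,
//                                    std::memory_order_acq_rel);  // toggle a bit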

// flag type and operations

typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
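
// Illustrative sketch, not part of the header: atomic_flag is the minimal
// building block for a test-and-set spin lock. test_and_set returns the
// previous state, so spinning until it returns false acquires the lock.
// The lock/enter/leave names are example-only.
//
//     std::atomic_flag lock = ATOMIC_FLAG_INIT;
//
//     void enter() {
//         while (std::atomic_flag_test_and_set_explicit(&lock, std::memory_order_acquire))
//             ;  // busy-wait until the flag was previously clear
//     }
//
//     void leave() {
//         std::atomic_flag_clear_explicit(&lock, std::memory_order_release);
//     }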

// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}
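
// Illustrative sketch, not part of the header: a release fence before a
// relaxed store synchronizes with an acquire fence after a relaxed load that
// reads the stored value, giving the same guarantees as release/acquire
// operations on the atomic itself; atomic_signal_fence orders only against a
// signal handler on the same thread. Names below are example-only.
//
//     // shared:   int data;  std::atomic<bool> ready(false);
//
//     // writer thread
//     data = 42;
//     std::atomic_thread_fence(std::memory_order_release);
//     ready.store(true, std::memory_order_relaxed);
//
//     // reader thread
//     while (!ready.load(std::memory_order_relaxed))
//         ;
//     std::atomic_thread_fence(std::memory_order_acquire);
//     assert(data == 42);  // the write to data is guaranteed to be visible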

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
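
// Illustrative sketch, not part of the header: these macros give constant
// initialization for atomics and atomic_flag, typically for objects with
// static storage duration. Names below are example-only.
//
//     std::atomic<int> hit_count = ATOMIC_VAR_INIT(0);
//     std::atomic_flag in_use = ATOMIC_FLAG_INIT;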

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
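
// Illustrative sketch, not part of the header: each *_LOCK_FREE macro expands
// to 0 (never lock-free), 1 (sometimes lock-free) or 2 (always lock-free), so
// it can gate code at preprocessing time, whereas is_lock_free() answers for a
// particular object at run time.
//
//     #if ATOMIC_INT_LOCK_FREE == 2
//         // safe to assume std::atomic<int> never falls back to a lock
//     #endif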

#endif  //  !__has_feature(cxx_atomic)

_LIBCPP_END_NAMESPACE_STD

#endif  // !_LIBCPP_HAS_NO_THREADS

#endif  // _LIBCPP_ATOMIC
