1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4// The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15 atomic synopsis
16
17namespace std
18{
19
20// feature test macro
21
22#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
23
24// order and consistency
25
26typedef enum memory_order
27{
28 memory_order_relaxed,
29 memory_order_consume, // load-consume
30 memory_order_acquire, // load-acquire
31 memory_order_release, // store-release
32 memory_order_acq_rel, // store-release load-acquire
    memory_order_seq_cst // store-release load-acquire, plus a single total order
34} memory_order;
35
36template <class T> T kill_dependency(T y) noexcept;
37
38// lock-free property
39
40#define ATOMIC_BOOL_LOCK_FREE unspecified
41#define ATOMIC_CHAR_LOCK_FREE unspecified
42#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
43#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
44#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
45#define ATOMIC_SHORT_LOCK_FREE unspecified
46#define ATOMIC_INT_LOCK_FREE unspecified
47#define ATOMIC_LONG_LOCK_FREE unspecified
48#define ATOMIC_LLONG_LOCK_FREE unspecified
49#define ATOMIC_POINTER_LOCK_FREE unspecified
50
51// flag type and operations
52
53typedef struct atomic_flag
54{
55 bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
56 bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
57 void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
58 void clear(memory_order m = memory_order_seq_cst) noexcept;
59 atomic_flag() noexcept = default;
60 atomic_flag(const atomic_flag&) = delete;
61 atomic_flag& operator=(const atomic_flag&) = delete;
62 atomic_flag& operator=(const atomic_flag&) volatile = delete;
63} atomic_flag;
64
65bool
66 atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
67
68bool
69 atomic_flag_test_and_set(atomic_flag* obj) noexcept;
70
71bool
72 atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
73 memory_order m) noexcept;
74
75bool
76 atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
77
78void
79 atomic_flag_clear(volatile atomic_flag* obj) noexcept;
80
81void
82 atomic_flag_clear(atomic_flag* obj) noexcept;
83
84void
85 atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
86
87void
88 atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
89
90#define ATOMIC_FLAG_INIT see below
91#define ATOMIC_VAR_INIT(value) see below
92
93template <class T>
94struct atomic
95{
96 static constexpr bool is_always_lock_free;
97 bool is_lock_free() const volatile noexcept;
98 bool is_lock_free() const noexcept;
99 void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
100 void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
101 T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
102 T load(memory_order m = memory_order_seq_cst) const noexcept;
103 operator T() const volatile noexcept;
104 operator T() const noexcept;
105 T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106 T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
107 bool compare_exchange_weak(T& expc, T desr,
108 memory_order s, memory_order f) volatile noexcept;
109 bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
110 bool compare_exchange_strong(T& expc, T desr,
111 memory_order s, memory_order f) volatile noexcept;
112 bool compare_exchange_strong(T& expc, T desr,
113 memory_order s, memory_order f) noexcept;
114 bool compare_exchange_weak(T& expc, T desr,
115 memory_order m = memory_order_seq_cst) volatile noexcept;
116 bool compare_exchange_weak(T& expc, T desr,
117 memory_order m = memory_order_seq_cst) noexcept;
118 bool compare_exchange_strong(T& expc, T desr,
119 memory_order m = memory_order_seq_cst) volatile noexcept;
120 bool compare_exchange_strong(T& expc, T desr,
121 memory_order m = memory_order_seq_cst) noexcept;
122
123 atomic() noexcept = default;
124 constexpr atomic(T desr) noexcept;
125 atomic(const atomic&) = delete;
126 atomic& operator=(const atomic&) = delete;
127 atomic& operator=(const atomic&) volatile = delete;
128 T operator=(T) volatile noexcept;
129 T operator=(T) noexcept;
130};
131
132template <>
133struct atomic<integral>
134{
135 static constexpr bool is_always_lock_free;
136 bool is_lock_free() const volatile noexcept;
137 bool is_lock_free() const noexcept;
138 void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
139 void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140 integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
141 integral load(memory_order m = memory_order_seq_cst) const noexcept;
142 operator integral() const volatile noexcept;
143 operator integral() const noexcept;
144 integral exchange(integral desr,
145 memory_order m = memory_order_seq_cst) volatile noexcept;
146 integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
147 bool compare_exchange_weak(integral& expc, integral desr,
148 memory_order s, memory_order f) volatile noexcept;
149 bool compare_exchange_weak(integral& expc, integral desr,
150 memory_order s, memory_order f) noexcept;
151 bool compare_exchange_strong(integral& expc, integral desr,
152 memory_order s, memory_order f) volatile noexcept;
153 bool compare_exchange_strong(integral& expc, integral desr,
154 memory_order s, memory_order f) noexcept;
155 bool compare_exchange_weak(integral& expc, integral desr,
156 memory_order m = memory_order_seq_cst) volatile noexcept;
157 bool compare_exchange_weak(integral& expc, integral desr,
158 memory_order m = memory_order_seq_cst) noexcept;
159 bool compare_exchange_strong(integral& expc, integral desr,
160 memory_order m = memory_order_seq_cst) volatile noexcept;
161 bool compare_exchange_strong(integral& expc, integral desr,
162 memory_order m = memory_order_seq_cst) noexcept;
163
164 integral
165 fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166 integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
167 integral
168 fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169 integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
170 integral
171 fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172 integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
173 integral
174 fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175 integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
176 integral
177 fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178 integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
179
180 atomic() noexcept = default;
181 constexpr atomic(integral desr) noexcept;
182 atomic(const atomic&) = delete;
183 atomic& operator=(const atomic&) = delete;
184 atomic& operator=(const atomic&) volatile = delete;
185 integral operator=(integral desr) volatile noexcept;
186 integral operator=(integral desr) noexcept;
187
188 integral operator++(int) volatile noexcept;
189 integral operator++(int) noexcept;
190 integral operator--(int) volatile noexcept;
191 integral operator--(int) noexcept;
192 integral operator++() volatile noexcept;
193 integral operator++() noexcept;
194 integral operator--() volatile noexcept;
195 integral operator--() noexcept;
196 integral operator+=(integral op) volatile noexcept;
197 integral operator+=(integral op) noexcept;
198 integral operator-=(integral op) volatile noexcept;
199 integral operator-=(integral op) noexcept;
200 integral operator&=(integral op) volatile noexcept;
201 integral operator&=(integral op) noexcept;
202 integral operator|=(integral op) volatile noexcept;
203 integral operator|=(integral op) noexcept;
204 integral operator^=(integral op) volatile noexcept;
205 integral operator^=(integral op) noexcept;
206};
207
208template <class T>
209struct atomic<T*>
210{
211 static constexpr bool is_always_lock_free;
212 bool is_lock_free() const volatile noexcept;
213 bool is_lock_free() const noexcept;
214 void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
215 void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
216 T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
217 T* load(memory_order m = memory_order_seq_cst) const noexcept;
218 operator T*() const volatile noexcept;
219 operator T*() const noexcept;
220 T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221 T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222 bool compare_exchange_weak(T*& expc, T* desr,
223 memory_order s, memory_order f) volatile noexcept;
224 bool compare_exchange_weak(T*& expc, T* desr,
225 memory_order s, memory_order f) noexcept;
226 bool compare_exchange_strong(T*& expc, T* desr,
227 memory_order s, memory_order f) volatile noexcept;
228 bool compare_exchange_strong(T*& expc, T* desr,
229 memory_order s, memory_order f) noexcept;
230 bool compare_exchange_weak(T*& expc, T* desr,
231 memory_order m = memory_order_seq_cst) volatile noexcept;
232 bool compare_exchange_weak(T*& expc, T* desr,
233 memory_order m = memory_order_seq_cst) noexcept;
234 bool compare_exchange_strong(T*& expc, T* desr,
235 memory_order m = memory_order_seq_cst) volatile noexcept;
236 bool compare_exchange_strong(T*& expc, T* desr,
237 memory_order m = memory_order_seq_cst) noexcept;
238 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
239 T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
240 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
241 T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
242
243 atomic() noexcept = default;
244 constexpr atomic(T* desr) noexcept;
245 atomic(const atomic&) = delete;
246 atomic& operator=(const atomic&) = delete;
247 atomic& operator=(const atomic&) volatile = delete;
248
249 T* operator=(T*) volatile noexcept;
250 T* operator=(T*) noexcept;
251 T* operator++(int) volatile noexcept;
252 T* operator++(int) noexcept;
253 T* operator--(int) volatile noexcept;
254 T* operator--(int) noexcept;
255 T* operator++() volatile noexcept;
256 T* operator++() noexcept;
257 T* operator--() volatile noexcept;
258 T* operator--() noexcept;
259 T* operator+=(ptrdiff_t op) volatile noexcept;
260 T* operator+=(ptrdiff_t op) noexcept;
261 T* operator-=(ptrdiff_t op) volatile noexcept;
262 T* operator-=(ptrdiff_t op) noexcept;
263};
264
265
266template <class T>
267 bool
268 atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
269
270template <class T>
271 bool
272 atomic_is_lock_free(const atomic<T>* obj) noexcept;
273
274template <class T>
275 void
276 atomic_init(volatile atomic<T>* obj, T desr) noexcept;
277
278template <class T>
279 void
280 atomic_init(atomic<T>* obj, T desr) noexcept;
281
282template <class T>
283 void
284 atomic_store(volatile atomic<T>* obj, T desr) noexcept;
285
286template <class T>
287 void
288 atomic_store(atomic<T>* obj, T desr) noexcept;
289
290template <class T>
291 void
292 atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
293
294template <class T>
295 void
296 atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
297
298template <class T>
299 T
300 atomic_load(const volatile atomic<T>* obj) noexcept;
301
302template <class T>
303 T
304 atomic_load(const atomic<T>* obj) noexcept;
305
306template <class T>
307 T
308 atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
309
310template <class T>
311 T
312 atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
313
314template <class T>
315 T
316 atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
317
318template <class T>
319 T
320 atomic_exchange(atomic<T>* obj, T desr) noexcept;
321
322template <class T>
323 T
324 atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
325
326template <class T>
327 T
328 atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
329
330template <class T>
331 bool
332 atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
333
334template <class T>
335 bool
336 atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
337
338template <class T>
339 bool
340 atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
341
342template <class T>
343 bool
344 atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
345
346template <class T>
347 bool
348 atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
349 T desr,
350 memory_order s, memory_order f) noexcept;
351
352template <class T>
353 bool
354 atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
355 memory_order s, memory_order f) noexcept;
356
357template <class T>
358 bool
359 atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
360 T* expc, T desr,
361 memory_order s, memory_order f) noexcept;
362
363template <class T>
364 bool
365 atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
366 T desr,
367 memory_order s, memory_order f) noexcept;
368
369template <class Integral>
370 Integral
371 atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
372
373template <class Integral>
374 Integral
375 atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
376
377template <class Integral>
378 Integral
379 atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
380 memory_order m) noexcept;
381template <class Integral>
382 Integral
383 atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
384 memory_order m) noexcept;
385template <class Integral>
386 Integral
387 atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
388
389template <class Integral>
390 Integral
391 atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
392
393template <class Integral>
394 Integral
395 atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
396 memory_order m) noexcept;
397template <class Integral>
398 Integral
399 atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
400 memory_order m) noexcept;
401template <class Integral>
402 Integral
403 atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
404
405template <class Integral>
406 Integral
407 atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
408
409template <class Integral>
410 Integral
411 atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
412 memory_order m) noexcept;
413template <class Integral>
414 Integral
415 atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
416 memory_order m) noexcept;
417template <class Integral>
418 Integral
419 atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
420
421template <class Integral>
422 Integral
423 atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
424
425template <class Integral>
426 Integral
427 atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
428 memory_order m) noexcept;
429template <class Integral>
430 Integral
431 atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
432 memory_order m) noexcept;
433template <class Integral>
434 Integral
435 atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
436
437template <class Integral>
438 Integral
439 atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
440
441template <class Integral>
442 Integral
443 atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
444 memory_order m) noexcept;
445template <class Integral>
446 Integral
447 atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
448 memory_order m) noexcept;
449
450template <class T>
451 T*
452 atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
453
454template <class T>
455 T*
456 atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
457
458template <class T>
459 T*
460 atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
461 memory_order m) noexcept;
462template <class T>
463 T*
464 atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
465
466template <class T>
467 T*
468 atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
469
470template <class T>
471 T*
472 atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
473
474template <class T>
475 T*
476 atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
477 memory_order m) noexcept;
478template <class T>
479 T*
480 atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
481
482// Atomics for standard typedef types
483
484typedef atomic<bool> atomic_bool;
485typedef atomic<char> atomic_char;
486typedef atomic<signed char> atomic_schar;
487typedef atomic<unsigned char> atomic_uchar;
488typedef atomic<short> atomic_short;
489typedef atomic<unsigned short> atomic_ushort;
490typedef atomic<int> atomic_int;
491typedef atomic<unsigned int> atomic_uint;
492typedef atomic<long> atomic_long;
493typedef atomic<unsigned long> atomic_ulong;
494typedef atomic<long long> atomic_llong;
495typedef atomic<unsigned long long> atomic_ullong;
496typedef atomic<char16_t> atomic_char16_t;
497typedef atomic<char32_t> atomic_char32_t;
498typedef atomic<wchar_t> atomic_wchar_t;
499
500typedef atomic<int_least8_t> atomic_int_least8_t;
501typedef atomic<uint_least8_t> atomic_uint_least8_t;
502typedef atomic<int_least16_t> atomic_int_least16_t;
503typedef atomic<uint_least16_t> atomic_uint_least16_t;
504typedef atomic<int_least32_t> atomic_int_least32_t;
505typedef atomic<uint_least32_t> atomic_uint_least32_t;
506typedef atomic<int_least64_t> atomic_int_least64_t;
507typedef atomic<uint_least64_t> atomic_uint_least64_t;
508
509typedef atomic<int_fast8_t> atomic_int_fast8_t;
510typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
511typedef atomic<int_fast16_t> atomic_int_fast16_t;
512typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
513typedef atomic<int_fast32_t> atomic_int_fast32_t;
514typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
515typedef atomic<int_fast64_t> atomic_int_fast64_t;
516typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
517
518typedef atomic<int8_t> atomic_int8_t;
519typedef atomic<uint8_t> atomic_uint8_t;
520typedef atomic<int16_t> atomic_int16_t;
521typedef atomic<uint16_t> atomic_uint16_t;
522typedef atomic<int32_t> atomic_int32_t;
523typedef atomic<uint32_t> atomic_uint32_t;
524typedef atomic<int64_t> atomic_int64_t;
525typedef atomic<uint64_t> atomic_uint64_t;
526
527typedef atomic<intptr_t> atomic_intptr_t;
528typedef atomic<uintptr_t> atomic_uintptr_t;
529typedef atomic<size_t> atomic_size_t;
530typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
531typedef atomic<intmax_t> atomic_intmax_t;
532typedef atomic<uintmax_t> atomic_uintmax_t;
533
534// fences
535
536void atomic_thread_fence(memory_order m) noexcept;
537void atomic_signal_fence(memory_order m) noexcept;
538
539} // std
540
541*/
542
543#include <__config>
544#include <cstddef>
545#include <cstdint>
546#include <type_traits>
547
548#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
549#pragma GCC system_header
550#endif
551
552#ifdef _LIBCPP_HAS_NO_THREADS
553#error <atomic> is not supported on this single threaded system
554#endif
555#if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
556#error <atomic> is not implemented
557#endif
558
559#if _LIBCPP_STD_VER > 14
// FIXME: use the right feature test macro value as chosen by SG10.
561# define __cpp_lib_atomic_is_always_lock_free 201603L
562#endif
563
564_LIBCPP_BEGIN_NAMESPACE_STD
565
// C++11 [atomics.order] memory_order enumeration.  The C-style
// "typedef enum" spelling mirrors the C11 <stdatomic.h> declaration.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
571
572#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
573namespace __gcc_atomic {
// Storage wrapper that emulates C11's _Atomic(T) on top of the GCC
// __atomic_* builtins: the payload lives in __a_value, and all access is
// routed through the __c11_atomic_* shims defined below.
template <typename _Tp>
struct __gcc_atomic_t {

// Only enforce [atomics.types.generic]p1 (trivially copyable _Tp) when the
// trait is available; _GNUC_VER >= 501 is presumably GCC >= 5.1
// (__GNUC__ * 100 + __GNUC_MINOR__, defined in __config) -- TODO confirm.
#if _GNUC_VER >= 501
  static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
  __gcc_atomic_t() _NOEXCEPT = default;
#else
  // Fallback for compilers without "= default": value-initialize the payload.
  __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;  // the atomically-accessed object
};
592#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
593
// Detection machinery: __can_assign<_Tp, _Td>::value is true when the
// expression "__a_value = <value of _Td>" is well-formed for an atomic
// object reached through a pointer of type _Tp.  Used below to select an
// implementation strategy for __c11_atomic_init.

// Declared but never defined; only ever named in unevaluated contexts.
template <typename _Tp> _Tp __create();

// Overload-resolution probe: the first overload participates only when the
// assignment inside sizeof() compiles; otherwise the (...) fallback wins.
// NOTE(review): __two is presumably libc++'s internal two-byte type from
// <type_traits>; its definition is not visible in this chunk.
template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
    sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};
607
608static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
609 // Avoid switch statement to make this a constexpr.
610 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
611 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
612 (__order == memory_order_release ? __ATOMIC_RELEASE:
613 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
614 (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
615 __ATOMIC_CONSUME))));
616}
617
618static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
619 // Avoid switch statement to make this a constexpr.
620 return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
621 (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
622 (__order == memory_order_release ? __ATOMIC_RELAXED:
623 (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
624 (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
625 __ATOMIC_CONSUME))));
626}
627
628} // namespace __gcc_atomic
629
// Non-atomic initialization of a volatile atomic object, chosen when a
// direct (volatile) assignment to __a_value is well-formed.
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
  __a->__a_value = __val;
}
637
638template <typename _Tp>
639static inline
640typename enable_if<
641 !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
642 __gcc_atomic::__can_assign< _Atomic(_Tp)*, _Tp>::value>::type
643__c11_atomic_init(volatile _Atomic(_Tp)* __a, _Tp __val) {
644 // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
645 // the default operator= in an object is not volatile, a byte-by-byte copy
646 // is required.
647 volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
648 volatile char* end = to + sizeof(_Tp);
649 char* from = reinterpret_cast<char*>(&__val);
650 while (to != end) {
651 *to++ = *from++;
652 }
653}
654
// Non-atomic initialization of a non-volatile atomic object: a plain
// assignment to the wrapped value suffices.
template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a, _Tp __val) {
  __a->__a_value = __val;
}
659
// Map C11-style fence operations directly onto the GCC builtins,
// translating the memory order first.
static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}

static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}
667
668template <typename _Tp>
669static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a, _Tp __val,
670 memory_order __order) {
671 return __atomic_store(&__a->__a_value, &__val,
672 __gcc_atomic::__to_gcc_order(__order));
673}
674
// Atomically store __val into *__a with the given ordering (non-volatile
// overload).
template <typename _Tp>
static inline void __c11_atomic_store(_Atomic(_Tp)* __a, _Tp __val,
                                      memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __gcc_atomic::__to_gcc_order(__order));
}
681
// Atomically load the value of *__a.  __atomic_load writes the result
// through an out-pointer, so a local temporary is used for the return.
template <typename _Tp>
static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
                                    memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

// Non-volatile overload of the atomic load above.
template <typename _Tp>
static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
698
// Atomically replace the value of *__a with __value and return the
// previous value.  __atomic_exchange uses in/out pointers, hence the
// local temporary.
template <typename _Tp>
static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
                                        _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

// Non-volatile overload of the atomic exchange above.
template <typename _Tp>
static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
                                        memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
716
// Compare-exchange shims.  The fourth argument to
// __atomic_compare_exchange selects weak (true: may fail spuriously) vs
// strong (false) semantics.  On failure the builtin writes the observed
// value back into *__expected.  The failure ordering is demoted via
// __to_gcc_failure_order since a failed CAS performs no store.

// Strong CAS, volatile overload.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Strong CAS, non-volatile overload.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Weak CAS (may fail spuriously), volatile overload.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Weak CAS (may fail spuriously), non-volatile overload.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
756
// __skip_amt<_Tp>::value is the stride used by the fetch_add/fetch_sub
// shims: 1 for ordinary types, sizeof(_Tp) for _Tp* so pointer atomics
// advance by whole objects.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations have no ::value, so instantiation fails.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
769
// Atomic fetch-add / fetch-sub.  _Td is the delta type (ptrdiff_t for
// pointer atomics, the integral type otherwise); the delta is pre-scaled
// by __skip_amt<_Tp> so pointer atomics step by sizeof(_Tp).  Each
// operation returns the value held before the modification.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
797
// Atomic bitwise fetch-and / fetch-or / fetch-xor (no __skip_amt scaling;
// these are only instantiated for integral types).  Each returns the
// value held before the modification; each has volatile and non-volatile
// overloads.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
839#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
840
841template <class _Tp>
842inline _LIBCPP_INLINE_VISIBILITY
843_Tp
844kill_dependency(_Tp __y) _NOEXCEPT
845{
846 return __y;
847}
848
849#define ATOMIC_BOOL_LOCK_FREE __GCC_ATOMIC_BOOL_LOCK_FREE
850#define ATOMIC_CHAR_LOCK_FREE __GCC_ATOMIC_CHAR_LOCK_FREE
851#define ATOMIC_CHAR16_T_LOCK_FREE __GCC_ATOMIC_CHAR16_T_LOCK_FREE
852#define ATOMIC_CHAR32_T_LOCK_FREE __GCC_ATOMIC_CHAR32_T_LOCK_FREE
853#define ATOMIC_WCHAR_T_LOCK_FREE __GCC_ATOMIC_WCHAR_T_LOCK_FREE
854#define ATOMIC_SHORT_LOCK_FREE __GCC_ATOMIC_SHORT_LOCK_FREE
855#define ATOMIC_INT_LOCK_FREE __GCC_ATOMIC_INT_LOCK_FREE
856#define ATOMIC_LONG_LOCK_FREE __GCC_ATOMIC_LONG_LOCK_FREE
857#define ATOMIC_LLONG_LOCK_FREE __GCC_ATOMIC_LLONG_LOCK_FREE
858#define ATOMIC_POINTER_LOCK_FREE __GCC_ATOMIC_POINTER_LOCK_FREE
859
860// general atomic<T>
861
862template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
863struct __atomic_base // false
864{
865 mutable _Atomic(_Tp) __a_;
866
867#if defined(__cpp_lib_atomic_is_always_lock_free)
868 static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
869#endif
870
871 _LIBCPP_INLINE_VISIBILITY
872 bool is_lock_free() const volatile _NOEXCEPT
873 {
874#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
875 return __c11_atomic_is_lock_free(sizeof(_Tp));
876#else
877 return __atomic_is_lock_free(sizeof(_Tp), 0);
878#endif
879 }
880 _LIBCPP_INLINE_VISIBILITY
881 bool is_lock_free() const _NOEXCEPT
882 {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
883 _LIBCPP_INLINE_VISIBILITY
884 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
885 {__c11_atomic_store(&__a_, __d, __m);}
886 _LIBCPP_INLINE_VISIBILITY
887 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
888 {__c11_atomic_store(&__a_, __d, __m);}
889 _LIBCPP_INLINE_VISIBILITY
890 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
891 {return __c11_atomic_load(&__a_, __m);}
892 _LIBCPP_INLINE_VISIBILITY
893 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
894 {return __c11_atomic_load(&__a_, __m);}
895 _LIBCPP_INLINE_VISIBILITY
896 operator _Tp() const volatile _NOEXCEPT {return load();}
897 _LIBCPP_INLINE_VISIBILITY
898 operator _Tp() const _NOEXCEPT {return load();}
899 _LIBCPP_INLINE_VISIBILITY
900 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
901 {return __c11_atomic_exchange(&__a_, __d, __m);}
902 _LIBCPP_INLINE_VISIBILITY
903 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
904 {return __c11_atomic_exchange(&__a_, __d, __m);}
905 _LIBCPP_INLINE_VISIBILITY
906 bool compare_exchange_weak(_Tp& __e, _Tp __d,
907 memory_order __s, memory_order __f) volatile _NOEXCEPT
908 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
909 _LIBCPP_INLINE_VISIBILITY
910 bool compare_exchange_weak(_Tp& __e, _Tp __d,
911 memory_order __s, memory_order __f) _NOEXCEPT
912 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
913 _LIBCPP_INLINE_VISIBILITY
914 bool compare_exchange_strong(_Tp& __e, _Tp __d,
915 memory_order __s, memory_order __f) volatile _NOEXCEPT
916 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
917 _LIBCPP_INLINE_VISIBILITY
918 bool compare_exchange_strong(_Tp& __e, _Tp __d,
919 memory_order __s, memory_order __f) _NOEXCEPT
920 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
921 _LIBCPP_INLINE_VISIBILITY
922 bool compare_exchange_weak(_Tp& __e, _Tp __d,
923 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
924 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
925 _LIBCPP_INLINE_VISIBILITY
926 bool compare_exchange_weak(_Tp& __e, _Tp __d,
927 memory_order __m = memory_order_seq_cst) _NOEXCEPT
928 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
929 _LIBCPP_INLINE_VISIBILITY
930 bool compare_exchange_strong(_Tp& __e, _Tp __d,
931 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
932 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
933 _LIBCPP_INLINE_VISIBILITY
934 bool compare_exchange_strong(_Tp& __e, _Tp __d,
935 memory_order __m = memory_order_seq_cst) _NOEXCEPT
936 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
937
938 _LIBCPP_INLINE_VISIBILITY
939#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
940 __atomic_base() _NOEXCEPT = default;
941#else
942 __atomic_base() _NOEXCEPT : __a_() {}
943#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
944
945 _LIBCPP_INLINE_VISIBILITY
946 _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
947#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
948 __atomic_base(const __atomic_base&) = delete;
949 __atomic_base& operator=(const __atomic_base&) = delete;
950 __atomic_base& operator=(const __atomic_base&) volatile = delete;
951#else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
952private:
953 __atomic_base(const __atomic_base&);
954 __atomic_base& operator=(const __atomic_base&);
955 __atomic_base& operator=(const __atomic_base&) volatile;
956#endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
957};
958
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-line definition of the static constexpr data member; required
// when the member is odr-used, since constexpr static data members are
// not implicitly inline before C++17.
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
963
964// atomic<Integral>
965
// __atomic_base<T, true>: specialization selected for integral types
// other than bool.  Adds the arithmetic and bitwise fetch operations and
// the corresponding operators on top of __atomic_base<T, false>.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Each fetch_* atomically applies the operation and returns the value
    // held *before* the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-increment/decrement return the prior value; the pre- forms and
    // compound assignments return the new value, recomputed locally from
    // the old value the fetch operation returned.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
};
1044
1045// atomic<T>
1046
// atomic<T>: primary template.  For integral T other than bool the base
// class template parameter defaults to true, so the arithmetic
// specialization above is inherited automatically.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and returns the stored value
    // (by value, not a reference), as the standard specifies.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
1064
1065// atomic<T*>
1066
// atomic<T*>: pointer specialization.  fetch_add/fetch_sub offset the
// pointer by a ptrdiff_t; the builtin performs the scaling, so the offset
// is measured in units of T.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    // Assignment: seq_cst store, returns the stored pointer by value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // fetch_* return the pointer value held *before* the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Post-fix forms return the prior pointer; pre-fix and compound forms
    // return the updated pointer, recomputed locally.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
};
1124
// atomic_is_lock_free
// Non-member, C-compatible query: equivalent to __o->is_lock_free().

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init
// Non-atomic initialization of a default-constructed atomic: reaches
// directly into the representation via __c11_atomic_init rather than
// performing an atomic store.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

// atomic_store
// Equivalent to __o->store(__d) (seq_cst).

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit
// Equivalent to __o->store(__d, __m).

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

// atomic_load
// Equivalent to __o->load() (seq_cst).

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit
// Equivalent to __o->load(__m).

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}
1232
// atomic_exchange
// Equivalent to __o->exchange(__d) (seq_cst): returns the previous value.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit
// Equivalent to __o->exchange(__d, __m).

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak
// Equivalent to __o->compare_exchange_weak(*__e, __d) (seq_cst).  On
// failure the observed value is written back through __e; the weak form
// may fail spuriously.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong
// Equivalent to __o->compare_exchange_strong(*__e, __d) (seq_cst).

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit
// Two-order form: __s is the success ordering, __f the failure ordering
// (the caller must supply a valid failure ordering).

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
1347
// atomic_fetch_add
// Constrained via enable_if to integral T other than bool -- the same set
// of types for which the fetch_add member exists.  Separate unconstrained
// overloads handle atomic<T*> with a ptrdiff_t offset.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit
// As above, with an explicit memory ordering.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
1432
// atomic_fetch_sub
// Mirror of atomic_fetch_add: integral (non-bool) overloads plus
// ptrdiff_t overloads for atomic<T*>.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit
// As above, with an explicit memory ordering.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
1517
// atomic_fetch_and
// Bitwise AND; integral (non-bool) types only -- there is no pointer form
// for the bitwise operations.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
1569
// atomic_fetch_or
// Bitwise OR; integral (non-bool) types only.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
1621
// atomic_fetch_xor
// Bitwise XOR; integral (non-bool) types only.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
1673
1674// flag type and operations
1675
// atomic_flag: the one atomic type guaranteed to be lock-free.
// test_and_set is an atomic exchange(true) that returns the previous
// state; clear is an atomic store(false).
typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    // Construction from a bool is a libc++ extension; the standard only
    // provides ATOMIC_FLAG_INIT for non-default initialization.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // Not copyable or copy-assignable.
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;
1714
// Non-member atomic_flag operations; each simply forwards to the
// corresponding member function on __o.

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
1770
1771// fences
1772
// atomic_thread_fence: establishes memory synchronization ordering __m
// between threads, independent of any particular atomic object, via the
// compiler builtin.
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

// atomic_signal_fence: compiler-only fence; orders accesses between a
// thread and a signal handler executed in that same thread (no hardware
// fence is emitted).
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}
1786
1787// Atomics for standard typedef types
1788
// One atomic_X alias for each standard character/integer type.
typedef atomic<bool> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;
typedef atomic<wchar_t> atomic_wchar_t;

// Aliases for the <cstdint> least/fast/exact-width and pointer-sized types.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// ATOMIC_FLAG_INIT initializes an atomic_flag to the clear (false) state.
#define ATOMIC_FLAG_INIT {false}
// ATOMIC_VAR_INIT(v): C-compatible static initializer for atomic objects.
#define ATOMIC_VAR_INIT(__v) {__v}
1841
1842_LIBCPP_END_NAMESPACE_STD
1843
1844#endif // _LIBCPP_ATOMIC
1845