#ifndef _GLIBCXX_ATOMIC_0_H
#define _GLIBCXX_ATOMIC_0_H 1

#pragma GCC system_header

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

// 0 == __atomic0 == Never lock-free
namespace __atomic0
{
  _GLIBCXX_BEGIN_EXTERN_C

  void
  atomic_flag_clear_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  void
  __atomic_flag_wait_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  _GLIBCXX_CONST __atomic_flag_base*
  __atomic_flag_for_address(const volatile void* __z) _GLIBCXX_NOTHROW;

  _GLIBCXX_END_EXTERN_C
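
  // The three functions above are defined in the library, not in this
  // header.  The idea: every atomic object is guarded by one flag drawn
  // from a fixed table, selected by hashing the object's address.  A
  // minimal sketch of that mapping, assuming a power-of-two table (the
  // library's own definition uses its own table size and hash, so treat
  // the names and constants below as illustrative only):
  //
  //   const size_t __table_size = 16;                  // assumption
  //   __atomic_flag_base __flag_table[__table_size];   // assumption
  //
  //   __atomic_flag_base*
  //   __atomic_flag_for_address(const volatile void* __z)
  //   {
  //     uintptr_t __u = reinterpret_cast<uintptr_t>(__z);
  //     __u /= sizeof(void*);              // discard alignment zeros
  //     return &__flag_table[__u % __table_size];
  //   }
  //
  // Distinct objects may hash to the same flag; that is harmless for
  // correctness, it merely serializes unrelated operations.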

  // Implementation specific defines.
#define _ATOMIC_MEMBER_ _M_i

#define _ATOMIC_LOAD_(__a, __x)                                         \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                       \
    __i_type* __p = &_ATOMIC_MEMBER_;                                   \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);           \
    __atomic_flag_wait_explicit(__g, __x);                              \
    __i_type __r = *__p;                                                \
    atomic_flag_clear_explicit(__g, __x);                               \
    __r; })

#define _ATOMIC_STORE_(__a, __n, __x)                                   \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                       \
    __i_type* __p = &_ATOMIC_MEMBER_;                                   \
    __typeof__(__n) __w = (__n);                                        \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);           \
    __atomic_flag_wait_explicit(__g, __x);                              \
    *__p = __w;                                                         \
    atomic_flag_clear_explicit(__g, __x);                               \
    __w; })

#define _ATOMIC_MODIFY_(__a, __o, __n, __x)                             \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                       \
    __i_type* __p = &_ATOMIC_MEMBER_;                                   \
    __typeof__(__n) __w = (__n);                                        \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);           \
    __atomic_flag_wait_explicit(__g, __x);                              \
    __i_type __r = *__p;                                                \
    *__p __o __w;                                                       \
    atomic_flag_clear_explicit(__g, __x);                               \
    __r; })

#define _ATOMIC_CMPEXCHNG_(__a, __e, __n, __x)                          \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                       \
    __i_type* __p = &_ATOMIC_MEMBER_;                                   \
    __typeof__(__e) __q = (__e);                                        \
    __typeof__(__n) __w = (__n);                                        \
    bool __r;                                                           \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);           \
    __atomic_flag_wait_explicit(__g, __x);                              \
    __i_type __t = *__p;                                                \
    if (*__q == __t)                                                    \
      {                                                                 \
        *__p = (__i_type)__w;                                           \
        __r = true;                                                     \
      }                                                                 \
    else { *__q = __t; __r = false; }                                   \
    atomic_flag_clear_explicit(__g, __x);                               \
    __r; })
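
  // For illustration, a call such as _ATOMIC_MODIFY_(this, +=, __i, __m)
  // expands (roughly, shown here for __int_type == int; the macros use
  // __typeof__ so they work for any member type) to a statement
  // expression that acquires the per-address lock, applies the compound
  // assignment, releases the lock, and yields the pre-modification value:
  //
  //   ({ int* __p = &_M_i;
  //      __atomic_flag_base* __g = __atomic_flag_for_address(__p);
  //      __atomic_flag_wait_explicit(__g, __m);   // spin until acquired
  //      int __r = *__p;                          // old value
  //      *__p += __i;                             // the __o operator
  //      atomic_flag_clear_explicit(__g, __m);    // release
  //      __r; })                                  // result of the macro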

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i): __atomic_flag_base({ __i }) { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst);

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile;

    void
    clear(memory_order __m = memory_order_seq_cst);

    void
    clear(memory_order __m = memory_order_seq_cst) volatile;
  };
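
  // atomic_flag is the one primitive that stays usable even in this
  // never-lock-free fallback.  Its typical use is a spin lock; a minimal
  // sketch in terms of the standard std::atomic_flag interface (user
  // code, not part of this header):
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __critical()
  //   {
  //     while (__lock.test_and_set(std::memory_order_acquire))
  //       ;                                   // spin until we own it
  //     // ... critical section ...
  //     __lock.clear(std::memory_order_release);
  //   }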

  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp	__int_type;

      __int_type	_M_i;

    public:
      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i): _M_i (__i) { }

      operator __int_type() const
      { return load(); }

      operator __int_type() const volatile
      { return load(); }

      __int_type
      operator=(__int_type __i)
      {
	store(__i);
	return __i;
      }

      __int_type
      operator=(__int_type __i) volatile
      {
	store(__i);
	return __i;
      }

      __int_type
      operator++(int)
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile
      { return fetch_add(1); }

      __int_type
      operator--(int)
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __int_type
      operator++()
      { return fetch_add(1) + 1; }

      __int_type
      operator++() volatile
      { return fetch_add(1) + 1; }

      __int_type
      operator--()
      { return fetch_sub(1) - 1; }

      __int_type
      operator--() volatile
      { return fetch_sub(1) - 1; }

      __int_type
      operator+=(__int_type __i)
      { return fetch_add(__i) + __i; }

      __int_type
      operator+=(__int_type __i) volatile
      { return fetch_add(__i) + __i; }

      __int_type
      operator-=(__int_type __i)
      { return fetch_sub(__i) - __i; }

      __int_type
      operator-=(__int_type __i) volatile
      { return fetch_sub(__i) - __i; }

      __int_type
      operator&=(__int_type __i)
      { return fetch_and(__i) & __i; }

      __int_type
      operator&=(__int_type __i) volatile
      { return fetch_and(__i) & __i; }

      __int_type
      operator|=(__int_type __i)
      { return fetch_or(__i) | __i; }

      __int_type
      operator|=(__int_type __i) volatile
      { return fetch_or(__i) | __i; }

      __int_type
      operator^=(__int_type __i)
      { return fetch_xor(__i) ^ __i; }

      __int_type
      operator^=(__int_type __i) volatile
      { return fetch_xor(__i) ^ __i; }
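
      // Each compound assignment must return the *new* value, while the
      // fetch_* primitives return the *old* one, so the operators
      // re-apply the operation to the returned value.  E.g. for an
      // atomic integer __a holding 5,
      //
      //   __int_type __n = (__a += 2);  // fetch_add(2) returns 5;
      //                                 // operator+= yields 5 + 2 == 7
      //
      // leaves __a at 7 and sets __n to 7, matching built-in += applied
      // atomically.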

      bool
      is_lock_free() const
      { return false; }

      bool
      is_lock_free() const volatile
      { return false; }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __i, __m);
      }

      void
      store(__int_type __i,
	    memory_order __m = memory_order_seq_cst) volatile
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      __int_type
      exchange(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst)
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
			    memory_order __m = memory_order_seq_cst) volatile
      {
	return compare_exchange_weak(__i1, __i2, __m,
				     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst)
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
			      memory_order __m = memory_order_seq_cst) volatile
      {
	return compare_exchange_strong(__i1, __i2, __m,
				       __calculate_memory_order(__m));
      }
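
      // compare_exchange_weak is meant to be driven by a retry loop.  A
      // minimal sketch (user code, not part of this header), doubling an
      // atomic integer __a:
      //
      //   int __old = __a.load();
      //   while (!__a.compare_exchange_weak(__old, __old * 2))
      //     { /* __old was rewritten with the value actually found */ }
      //
      // On failure the expected value is updated in place, so each retry
      // recomputes from current data.  In this locked implementation the
      // weak form never fails spuriously, but portable callers must not
      // rely on that.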

      __int_type
      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
	       memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
		memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
    };
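
  // Note how a single macro implements every read-modify-write above:
  // the second argument to _ATOMIC_MODIFY_ is a compound-assignment
  // operator token (+=, -=, &=, |=, ^=, or plain = for exchange) that
  // the macro splices between *__p and the new value while the
  // per-address lock is held.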

  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*	__return_pointer_type;
      typedef void*	__pointer_type;

      __pointer_type	_M_i;

    public:
      // Requires __pointer_type convertible to _M_i.
      constexpr __atomic_base(__return_pointer_type __p): _M_i (__p) { }
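
      // Design note: the stored member is a plain void*, while the
      // public interface traffics in _PTp*.  Keeping one underlying type
      // lets every instantiation share the same locked code paths;
      // values are reinterpret_cast back to __return_pointer_type on the
      // way out.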

      operator __return_pointer_type() const
      { return reinterpret_cast<__return_pointer_type>(load()); }

      operator __return_pointer_type() const volatile
      { return reinterpret_cast<__return_pointer_type>(load()); }

      __return_pointer_type
      operator=(__pointer_type __p)
      {
	store(__p);
	return reinterpret_cast<__return_pointer_type>(__p);
      }

      __return_pointer_type
      operator=(__pointer_type __p) volatile
      {
	store(__p);
	return reinterpret_cast<__return_pointer_type>(__p);
      }

      __return_pointer_type
      operator++(int)
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }

      __return_pointer_type
      operator++(int) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }

      __return_pointer_type
      operator--(int)
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }

      __return_pointer_type
      operator--(int) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }

      __return_pointer_type
      operator++()
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }

      __return_pointer_type
      operator++() volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }

      __return_pointer_type
      operator--()
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }

      __return_pointer_type
      operator--() volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }

      __return_pointer_type
      operator+=(ptrdiff_t __d)
      { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }

      __return_pointer_type
      operator+=(ptrdiff_t __d) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }

      __return_pointer_type
      operator-=(ptrdiff_t __d)
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }

      __return_pointer_type
      operator-=(ptrdiff_t __d) volatile
      { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }

      bool
      is_lock_free() const
      { return false; }

      bool
      is_lock_free() const volatile
      { return false; }

      void
      store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	_ATOMIC_STORE_(this, __p, __m);
      }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile
      {
	__glibcxx_assert(__m != memory_order_acquire);
	__glibcxx_assert(__m != memory_order_acq_rel);
	__glibcxx_assert(__m != memory_order_consume);
	volatile __pointer_type* __p2 = &_M_i;
	__typeof__(__p) __w = (__p);
	__atomic_flag_base* __g = __atomic_flag_for_address(__p2);
	__atomic_flag_wait_explicit(__g, __m);
	*__p2 = reinterpret_cast<__pointer_type>(__w);
	atomic_flag_clear_explicit(__g, __m);
      }
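
      // This volatile overload (and the volatile exchange further down)
      // is written out by hand rather than via _ATOMIC_STORE_ and
      // _ATOMIC_MODIFY_, presumably because the macros' __typeof__-based
      // typedefs and the void*/T* casts needed here do not mix well with
      // the volatile-qualified member; the same lock/copy/unlock
      // sequence is simply expanded inline.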

      __return_pointer_type
      load(memory_order __m = memory_order_seq_cst) const
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	void* __v = _ATOMIC_LOAD_(this, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
	__glibcxx_assert(__m != memory_order_release);
	__glibcxx_assert(__m != memory_order_acq_rel);
	void* __v = _ATOMIC_LOAD_(this, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      {
	void* __v = _ATOMIC_MODIFY_(this, =, __p, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile
      {
	volatile __pointer_type* __p2 = &_M_i;
	__typeof__(__p) __w = (__p);
	__atomic_flag_base* __g = __atomic_flag_for_address(__p2);
	__atomic_flag_wait_explicit(__g, __m);
	__pointer_type __r = *__p2;
	*__p2 = __w;
	atomic_flag_clear_explicit(__g, __m);
	// Return the value read under the lock, not a racy re-read of _M_i.
	return reinterpret_cast<__return_pointer_type>(__r);
      }

      bool
      compare_exchange_strong(__return_pointer_type& __rp1,
			      __pointer_type __p2,
			      memory_order __m1, memory_order __m2)
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	__pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
	return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
      }

      bool
      compare_exchange_strong(__return_pointer_type& __rp1,
			      __pointer_type __p2,
			      memory_order __m1, memory_order __m2) volatile
      {
	__glibcxx_assert(__m2 != memory_order_release);
	__glibcxx_assert(__m2 != memory_order_acq_rel);
	__glibcxx_assert(__m2 <= __m1);
	__pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
	return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
      }

      __return_pointer_type
      fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      {
	void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile
      {
	void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      {
	void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }

      __return_pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile
      {
	void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
	return reinterpret_cast<__return_pointer_type>(__v);
      }
    };
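
  // Caveat on pointer arithmetic: the member behind _ATOMIC_MEMBER_ is a
  // void*, so the "*__p += __d" produced by _ATOMIC_MODIFY_ relies on
  // the GNU extension that arithmetic on void* works in bytes.  As
  // written, fetch_add(__d) therefore advances the stored address by
  // __d bytes, not __d elements of _PTp; any element-count scaling has
  // to happen in the layer built on top of this base class.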

#undef _ATOMIC_LOAD_
#undef _ATOMIC_STORE_
#undef _ATOMIC_MODIFY_
#undef _ATOMIC_CMPEXCHNG_

} // namespace __atomic0

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif