// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
//
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */
#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
#endif
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
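
  // Editorial illustration (not part of the original header): the modifier
  // bits sit above the low 16 bits, so an HLE hint can be OR-ed onto a
  // standard order and stripped off again with the masks, e.g.
  //
  //   memory_order __x = memory_order_acquire | __memory_order_hle_acquire;
  //   memory_order __plain = __x & __memory_order_mask; // memory_order_acquire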
  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }
  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
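
  // Editorial illustration (not part of the original header): the derived
  // failure order never has a release component, so
  //
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  //   __cmpexch_failure_order(memory_order_seq_cst) == memory_order_seq_cst
  //
  // while any modifier bits in the success order are carried through.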
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
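
  // Usage sketch (editorial addition; __data and __ready are hypothetical
  // names): a release fence paired with an acquire fence publishes data
  // through a relaxed flag.
  //
  //   // Producer:
  //   __data = 42;
  //   std::atomic_thread_fence(std::memory_order_release);
  //   __ready.store(true, std::memory_order_relaxed);
  //
  //   // Consumer:
  //   while (!__ready.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   // __data is now guaranteed to read 42.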
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
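
  // Editorial note: kill_dependency ends a memory_order_consume dependency
  // chain; values computed from its result are no longer dependency-ordered
  // after the originating load, e.g. (hypothetical names)
  //
  //   int* __p = __ptr.load(std::memory_order_consume);
  //   int __i = *__p;                              // carries a dependency
  //   int __j = __arr[std::kill_dependency(__i)];  // dependency ends here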
  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;
  /// atomic_char
  typedef __atomic_base<char>			atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>		atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>		atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>			atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>		atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>			atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>		atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>			atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>		atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>		atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>	atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>		atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>		atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>		atomic_char32_t;
  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>		atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>		atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>		atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>		atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>		atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>		atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>		atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>		atomic_uint_least64_t;

  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>		atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>		atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>		atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>		atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>		atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>		atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>		atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>		atomic_uint_fast64_t;
  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>		atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>		atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>			atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>		atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>		atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>		atomic_ptrdiff_t;
#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
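
  // Usage sketch (editorial addition): ATOMIC_VAR_INIT gives C-compatible
  // constant initialization of an atomic object, e.g.
  //
  //   std::atomic_int __counter = ATOMIC_VAR_INIT(0);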
  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C
#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
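
  // Usage sketch (editorial addition; __spin_lock_sketch is a hypothetical
  // name): atomic_flag is the building block for a minimal test-and-set
  // spin lock.
  //
  //   struct __spin_lock_sketch
  //   {
  //     std::atomic_flag _M_flag = ATOMIC_FLAG_INIT;
  //
  //     void lock() noexcept
  //     { while (_M_flag.test_and_set(std::memory_order_acquire)) { } }
  //
  //     void unlock() noexcept
  //     { _M_flag.clear(std::memory_order_release); }
  //   };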
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp	__int_type;

      __int_type	_M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;
      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }
      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
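
      // Usage sketch (editorial addition, hypothetical names): the weak
      // form may fail spuriously, so it belongs in a retry loop; the
      // expected value is refreshed with the observed value on failure.
      //
      //   std::atomic_int __a(1);
      //   int __old = __a.load(std::memory_order_relaxed);
      //   while (!__a.compare_exchange_weak(__old, __old * 2,
      //                                     std::memory_order_acq_rel,
      //                                     std::memory_order_relaxed))
      //     { /* __old now holds the freshly observed value. */ }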
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
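
  // Editorial illustration (not part of the original header): fetch_add
  // returns the value held before the addition, while operator+= returns
  // the new value, e.g.
  //
  //   std::atomic_int __n(5);
  //   int __before = __n.fetch_add(2); // __before == 5, __n == 7
  //   int __after  = (__n += 2);       // __after  == 9, __n == 9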
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*	__pointer_type;

      __pointer_type	_M_p;

    public:
      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;
      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
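
  // Editorial illustration (not part of the original header): pointer
  // fetch_add scales by the pointee size via _M_type_size, matching
  // ordinary pointer arithmetic, e.g. (using the internal base directly,
  // purely for illustration)
  //
  //   long __buf[4];
  //   std::__atomic_base<long*> __p(__buf);
  //   __p.fetch_add(2); // advances by 2 * sizeof(long) bytes: &__buf[2]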
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H