From: Paolo Carlini
Date: Thu, 4 Aug 2011 19:57:48 +0000 (+0000)
Subject: atomic.cc: Use noexcept.
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=bdc05efbc844f9878e36b0e5427af4806502709c;p=gcc.git

atomic.cc: Use noexcept.

2011-08-04  Paolo Carlini

	* src/atomic.cc: Use noexcept.
	* include/std/atomic: Likewise.
	* include/bits/atomic_0.h: Likewise.
	* include/bits/atomic_2.h: Likewise.
	* include/bits/atomic_base.h: Likewise.

From-SVN: r177413
---

diff --git a/libstdc++-v3/ChangeLog b/libstdc++-v3/ChangeLog
index c94b300f1df..97a1d83aac7 100644
--- a/libstdc++-v3/ChangeLog
+++ b/libstdc++-v3/ChangeLog
@@ -1,3 +1,11 @@
+2011-08-04  Paolo Carlini
+
+	* src/atomic.cc: Use noexcept.
+	* include/std/atomic: Likewise.
+	* include/bits/atomic_0.h: Likewise.
+	* include/bits/atomic_2.h: Likewise.
+	* include/bits/atomic_base.h: Likewise.
+
 2011-08-03  Benjamin Kosnik
 
 	* testsuite/performance/27_io/filebuf_sputn_unbuf.cc: Include
diff --git a/libstdc++-v3/include/bits/atomic_0.h b/libstdc++-v3/include/bits/atomic_0.h
index 84ff779ef05..4f8b0929f16 100644
--- a/libstdc++-v3/include/bits/atomic_0.h
+++ b/libstdc++-v3/include/bits/atomic_0.h
@@ -111,26 +111,26 @@ namespace __atomic0
   /// atomic_flag
   struct atomic_flag : public __atomic_flag_base
   {
-    atomic_flag() = default;
-    ~atomic_flag() = default;
+    atomic_flag() noexcept = default;
+    ~atomic_flag() noexcept = default;
     atomic_flag(const atomic_flag&) = delete;
     atomic_flag& operator=(const atomic_flag&) = delete;
     atomic_flag& operator=(const atomic_flag&) volatile = delete;
 
     // Conversion to ATOMIC_FLAG_INIT.
-    atomic_flag(bool __i): __atomic_flag_base({ __i }) { }
+    atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
 
     bool
-    test_and_set(memory_order __m = memory_order_seq_cst);
+    test_and_set(memory_order __m = memory_order_seq_cst) noexcept;
 
     bool
-    test_and_set(memory_order __m = memory_order_seq_cst) volatile;
+    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept;
 
     void
-    clear(memory_order __m = memory_order_seq_cst);
+    clear(memory_order __m = memory_order_seq_cst) noexcept;
 
     void
-    clear(memory_order __m = memory_order_seq_cst) volatile;
+    clear(memory_order __m = memory_order_seq_cst) volatile noexcept;
   };
 
@@ -166,117 +166,117 @@ namespace __atomic0
       __int_type _M_i;
 
     public:
-      __atomic_base() = default;
-      ~__atomic_base() = default;
+      __atomic_base() noexcept = default;
+      ~__atomic_base() noexcept = default;
       __atomic_base(const __atomic_base&) = delete;
       __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;
 
      // Requires __int_type convertible to _M_base._M_i.
- constexpr __atomic_base(__int_type __i): _M_i (__i) { } + constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { } - operator __int_type() const + operator __int_type() const noexcept { return load(); } - operator __int_type() const volatile + operator __int_type() const volatile noexcept { return load(); } __int_type - operator=(__int_type __i) + operator=(__int_type __i) noexcept { store(__i); return __i; } __int_type - operator=(__int_type __i) volatile + operator=(__int_type __i) volatile noexcept { store(__i); return __i; } __int_type - operator++(int) + operator++(int) noexcept { return fetch_add(1); } __int_type - operator++(int) volatile + operator++(int) volatile noexcept { return fetch_add(1); } __int_type - operator--(int) + operator--(int) noexcept { return fetch_sub(1); } __int_type - operator--(int) volatile + operator--(int) volatile noexcept { return fetch_sub(1); } __int_type - operator++() + operator++() noexcept { return fetch_add(1) + 1; } __int_type - operator++() volatile + operator++() volatile noexcept { return fetch_add(1) + 1; } __int_type - operator--() + operator--() noexcept { return fetch_sub(1) - 1; } __int_type - operator--() volatile + operator--() volatile noexcept { return fetch_sub(1) - 1; } __int_type - operator+=(__int_type __i) + operator+=(__int_type __i) noexcept { return fetch_add(__i) + __i; } __int_type - operator+=(__int_type __i) volatile + operator+=(__int_type __i) volatile noexcept { return fetch_add(__i) + __i; } __int_type - operator-=(__int_type __i) + operator-=(__int_type __i) noexcept { return fetch_sub(__i) - __i; } __int_type - operator-=(__int_type __i) volatile + operator-=(__int_type __i) volatile noexcept { return fetch_sub(__i) - __i; } __int_type - operator&=(__int_type __i) + operator&=(__int_type __i) noexcept { return fetch_and(__i) & __i; } __int_type - operator&=(__int_type __i) volatile + operator&=(__int_type __i) volatile noexcept { return fetch_and(__i) & __i; } __int_type - operator|=(__int_type __i) + operator|=(__int_type __i) noexcept { return fetch_or(__i) | __i; } __int_type - operator|=(__int_type __i) volatile + operator|=(__int_type __i) volatile noexcept { return fetch_or(__i) | __i; } __int_type - operator^=(__int_type __i) + operator^=(__int_type __i) noexcept { return fetch_xor(__i) ^ __i; } __int_type - operator^=(__int_type __i) volatile + operator^=(__int_type __i) volatile noexcept { return fetch_xor(__i) ^ __i; } bool - is_lock_free() const + is_lock_free() const noexcept { return false; } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return false; } void - store(__int_type __i, memory_order __m = memory_order_seq_cst) + store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -285,7 +285,8 @@ namespace __atomic0 } void - store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + store(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -294,7 +295,7 @@ namespace __atomic0 } __int_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -302,7 +303,7 @@ namespace __atomic0 } __int_type - load(memory_order __m = memory_order_seq_cst) const volatile 
+ load(memory_order __m = memory_order_seq_cst) const volatile noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -310,16 +311,18 @@ namespace __atomic0 } __int_type - exchange(__int_type __i, memory_order __m = memory_order_seq_cst) + exchange(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, =, __i, __m); } __int_type - exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + exchange(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, =, __i, __m); } bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -329,7 +332,8 @@ namespace __atomic0 bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -339,7 +343,7 @@ namespace __atomic0 bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_weak(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -347,7 +351,7 @@ namespace __atomic0 bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_weak(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -355,7 +359,7 @@ namespace __atomic0 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -365,7 +369,8 @@ namespace __atomic0 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -375,7 +380,7 @@ namespace __atomic0 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_strong(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -383,54 +388,60 @@ namespace __atomic0 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_strong(__i1, __i2, __m, __calculate_memory_order(__m)); } __int_type - fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_add(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, +=, __i, __m); } __int_type fetch_add(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, +=, __i, __m); } __int_type - fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_sub(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, 
-=, __i, __m); } __int_type fetch_sub(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, -=, __i, __m); } __int_type - fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_and(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, &=, __i, __m); } __int_type fetch_and(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, &=, __i, __m); } __int_type - fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_or(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, |=, __i, __m); } __int_type - fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + fetch_or(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, |=, __i, __m); } __int_type - fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_xor(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return _ATOMIC_MODIFY_(this, ^=, __i, __m); } __int_type fetch_xor(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _ATOMIC_MODIFY_(this, ^=, __i, __m); } }; @@ -445,93 +456,95 @@ namespace __atomic0 __pointer_type _M_i; public: - __atomic_base() = default; - ~__atomic_base() = default; + __atomic_base() noexcept = default; + ~__atomic_base() noexcept = default; __atomic_base(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) volatile = delete; // Requires __pointer_type convertible to _M_i. 
- constexpr __atomic_base(__return_pointer_type __p): _M_i (__p) { } + constexpr __atomic_base(__return_pointer_type __p) noexcept + : _M_i (__p) { } - operator __return_pointer_type() const + operator __return_pointer_type() const noexcept { return reinterpret_cast<__return_pointer_type>(load()); } - operator __return_pointer_type() const volatile + operator __return_pointer_type() const volatile noexcept { return reinterpret_cast<__return_pointer_type>(load()); } __return_pointer_type - operator=(__pointer_type __p) + operator=(__pointer_type __p) noexcept { store(__p); return reinterpret_cast<__return_pointer_type>(__p); } __return_pointer_type - operator=(__pointer_type __p) volatile + operator=(__pointer_type __p) volatile noexcept { store(__p); return reinterpret_cast<__return_pointer_type>(__p); } __return_pointer_type - operator++(int) + operator++(int) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); } __return_pointer_type - operator++(int) volatile + operator++(int) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); } __return_pointer_type - operator--(int) + operator--(int) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); } __return_pointer_type - operator--(int) volatile + operator--(int) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); } __return_pointer_type - operator++() + operator++() noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); } __return_pointer_type - operator++() volatile + operator++() volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); } __return_pointer_type - operator--() + operator--() noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); } __return_pointer_type - operator--() volatile + operator--() volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); } __return_pointer_type - operator+=(ptrdiff_t __d) + operator+=(ptrdiff_t __d) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); } __return_pointer_type - operator+=(ptrdiff_t __d) volatile + operator+=(ptrdiff_t __d) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); } __return_pointer_type - operator-=(ptrdiff_t __d) + operator-=(ptrdiff_t __d) noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); } __return_pointer_type - operator-=(ptrdiff_t __d) volatile + operator-=(ptrdiff_t __d) volatile noexcept { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); } bool - is_lock_free() const + is_lock_free() const noexcept { return true; } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return true; } void - store(__pointer_type __p, memory_order __m = memory_order_seq_cst) + store(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -541,7 +554,7 @@ namespace __atomic0 void store(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -556,7 +569,7 @@ namespace __atomic0 } __return_pointer_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { __glibcxx_assert(__m != 
memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -565,7 +578,7 @@ namespace __atomic0 } __return_pointer_type - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -574,7 +587,8 @@ namespace __atomic0 } __return_pointer_type - exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst) + exchange(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { void* __v = _ATOMIC_MODIFY_(this, =, __p, __m); return reinterpret_cast<__return_pointer_type>(__v); @@ -582,7 +596,7 @@ namespace __atomic0 __return_pointer_type exchange(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { volatile __pointer_type* __p2 = &_M_i; __typeof__(__p) __w = (__p); @@ -597,7 +611,7 @@ namespace __atomic0 bool compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -608,7 +622,8 @@ namespace __atomic0 bool compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -618,7 +633,8 @@ namespace __atomic0 } __return_pointer_type - fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_add(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); @@ -626,14 +642,15 @@ namespace __atomic0 __return_pointer_type fetch_add(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); } __return_pointer_type - fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_sub(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); @@ -641,7 +658,7 @@ namespace __atomic0 __return_pointer_type fetch_sub(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m); return reinterpret_cast<__return_pointer_type>(__v); diff --git a/libstdc++-v3/include/bits/atomic_2.h b/libstdc++-v3/include/bits/atomic_2.h index f95beca55c7..072e82a0a9f 100644 --- a/libstdc++-v3/include/bits/atomic_2.h +++ b/libstdc++-v3/include/bits/atomic_2.h @@ -48,17 +48,17 @@ namespace __atomic2 /// atomic_flag struct atomic_flag : public __atomic_flag_base { - atomic_flag() = default; - ~atomic_flag() = default; + atomic_flag() noexcept = default; + ~atomic_flag() noexcept = default; atomic_flag(const atomic_flag&) = delete; atomic_flag& operator=(const atomic_flag&) = delete; atomic_flag& operator=(const atomic_flag&) volatile = delete; // Conversion to ATOMIC_FLAG_INIT. 
- atomic_flag(bool __i): __atomic_flag_base({ __i }) { } + atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { } bool - test_and_set(memory_order __m = memory_order_seq_cst) + test_and_set(memory_order __m = memory_order_seq_cst) noexcept { // Redundant synchronize if built-in for lock is a full barrier. if (__m != memory_order_acquire && __m != memory_order_acq_rel) @@ -67,7 +67,7 @@ namespace __atomic2 } bool - test_and_set(memory_order __m = memory_order_seq_cst) volatile + test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept { // Redundant synchronize if built-in for lock is a full barrier. if (__m != memory_order_acquire && __m != memory_order_acq_rel) @@ -76,7 +76,7 @@ namespace __atomic2 } void - clear(memory_order __m = memory_order_seq_cst) + clear(memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_consume); __glibcxx_assert(__m != memory_order_acquire); @@ -88,7 +88,7 @@ namespace __atomic2 } void - clear(memory_order __m = memory_order_seq_cst) volatile + clear(memory_order __m = memory_order_seq_cst) volatile noexcept { __glibcxx_assert(__m != memory_order_consume); __glibcxx_assert(__m != memory_order_acquire); @@ -133,117 +133,117 @@ namespace __atomic2 __int_type _M_i; public: - __atomic_base() = default; - ~__atomic_base() = default; + __atomic_base() noexcept = default; + ~__atomic_base() noexcept = default; __atomic_base(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) volatile = delete; // Requires __int_type convertible to _M_i. - constexpr __atomic_base(__int_type __i): _M_i (__i) { } + constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { } - operator __int_type() const + operator __int_type() const noexcept { return load(); } - operator __int_type() const volatile + operator __int_type() const volatile noexcept { return load(); } __int_type - operator=(__int_type __i) + operator=(__int_type __i) noexcept { store(__i); return __i; } __int_type - operator=(__int_type __i) volatile + operator=(__int_type __i) volatile noexcept { store(__i); return __i; } __int_type - operator++(int) + operator++(int) noexcept { return fetch_add(1); } __int_type - operator++(int) volatile + operator++(int) volatile noexcept { return fetch_add(1); } __int_type - operator--(int) + operator--(int) noexcept { return fetch_sub(1); } __int_type - operator--(int) volatile + operator--(int) volatile noexcept { return fetch_sub(1); } __int_type - operator++() + operator++() noexcept { return __sync_add_and_fetch(&_M_i, 1); } __int_type - operator++() volatile + operator++() volatile noexcept { return __sync_add_and_fetch(&_M_i, 1); } __int_type - operator--() + operator--() noexcept { return __sync_sub_and_fetch(&_M_i, 1); } __int_type - operator--() volatile + operator--() volatile noexcept { return __sync_sub_and_fetch(&_M_i, 1); } __int_type - operator+=(__int_type __i) + operator+=(__int_type __i) noexcept { return __sync_add_and_fetch(&_M_i, __i); } __int_type - operator+=(__int_type __i) volatile + operator+=(__int_type __i) volatile noexcept { return __sync_add_and_fetch(&_M_i, __i); } __int_type - operator-=(__int_type __i) + operator-=(__int_type __i) noexcept { return __sync_sub_and_fetch(&_M_i, __i); } __int_type - operator-=(__int_type __i) volatile + operator-=(__int_type __i) volatile noexcept { return __sync_sub_and_fetch(&_M_i, __i); } __int_type - operator&=(__int_type __i) + operator&=(__int_type __i) noexcept { 
return __sync_and_and_fetch(&_M_i, __i); } __int_type - operator&=(__int_type __i) volatile + operator&=(__int_type __i) volatile noexcept { return __sync_and_and_fetch(&_M_i, __i); } __int_type - operator|=(__int_type __i) + operator|=(__int_type __i) noexcept { return __sync_or_and_fetch(&_M_i, __i); } __int_type - operator|=(__int_type __i) volatile + operator|=(__int_type __i) volatile noexcept { return __sync_or_and_fetch(&_M_i, __i); } __int_type - operator^=(__int_type __i) + operator^=(__int_type __i) noexcept { return __sync_xor_and_fetch(&_M_i, __i); } __int_type - operator^=(__int_type __i) volatile + operator^=(__int_type __i) volatile noexcept { return __sync_xor_and_fetch(&_M_i, __i); } bool - is_lock_free() const + is_lock_free() const noexcept { return true; } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return true; } void - store(__int_type __i, memory_order __m = memory_order_seq_cst) + store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -261,7 +261,8 @@ namespace __atomic2 } void - store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + store(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -279,7 +280,7 @@ namespace __atomic2 } __int_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -291,7 +292,7 @@ namespace __atomic2 } __int_type - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -303,7 +304,8 @@ namespace __atomic2 } __int_type - exchange(__int_type __i, memory_order __m = memory_order_seq_cst) + exchange(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { // XXX built-in assumes memory_order_acquire. return __sync_lock_test_and_set(&_M_i, __i); @@ -311,7 +313,8 @@ namespace __atomic2 __int_type - exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile + exchange(__int_type __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { // XXX built-in assumes memory_order_acquire. 
return __sync_lock_test_and_set(&_M_i, __i); @@ -319,17 +322,18 @@ namespace __atomic2 bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { return compare_exchange_strong(__i1, __i2, __m1, __m2); } bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { return compare_exchange_strong(__i1, __i2, __m1, __m2); } bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_weak(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -337,7 +341,7 @@ namespace __atomic2 bool compare_exchange_weak(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_weak(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -345,7 +349,7 @@ namespace __atomic2 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -361,7 +365,8 @@ namespace __atomic2 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -377,7 +382,7 @@ namespace __atomic2 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_strong(__i1, __i2, __m, __calculate_memory_order(__m)); @@ -385,55 +390,60 @@ namespace __atomic2 bool compare_exchange_strong(__int_type& __i1, __int_type __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_strong(__i1, __i2, __m, __calculate_memory_order(__m)); } __int_type - fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_add(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return __sync_fetch_and_add(&_M_i, __i); } __int_type fetch_add(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return __sync_fetch_and_add(&_M_i, __i); } __int_type - fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_sub(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return __sync_fetch_and_sub(&_M_i, __i); } __int_type fetch_sub(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return __sync_fetch_and_sub(&_M_i, __i); } __int_type - fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_and(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return __sync_fetch_and_and(&_M_i, __i); } __int_type fetch_and(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return __sync_fetch_and_and(&_M_i, __i); } __int_type - fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_or(__int_type __i, + 
memory_order __m = memory_order_seq_cst) noexcept { return __sync_fetch_and_or(&_M_i, __i); } __int_type fetch_or(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return __sync_fetch_and_or(&_M_i, __i); } __int_type - fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst) + fetch_xor(__int_type __i, + memory_order __m = memory_order_seq_cst) noexcept { return __sync_fetch_and_xor(&_M_i, __i); } __int_type fetch_xor(__int_type __i, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return __sync_fetch_and_xor(&_M_i, __i); } }; @@ -448,93 +458,94 @@ namespace __atomic2 __pointer_type _M_p; public: - __atomic_base() = default; - ~__atomic_base() = default; + __atomic_base() noexcept = default; + ~__atomic_base() noexcept = default; __atomic_base(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) = delete; __atomic_base& operator=(const __atomic_base&) volatile = delete; // Requires __pointer_type convertible to _M_p. - constexpr __atomic_base(__pointer_type __p): _M_p (__p) { } + constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { } - operator __pointer_type() const + operator __pointer_type() const noexcept { return load(); } - operator __pointer_type() const volatile + operator __pointer_type() const volatile noexcept { return load(); } __pointer_type - operator=(__pointer_type __p) + operator=(__pointer_type __p) noexcept { store(__p); return __p; } __pointer_type - operator=(__pointer_type __p) volatile + operator=(__pointer_type __p) volatile noexcept { store(__p); return __p; } __pointer_type - operator++(int) + operator++(int) noexcept { return fetch_add(1); } __pointer_type - operator++(int) volatile + operator++(int) volatile noexcept { return fetch_add(1); } __pointer_type - operator--(int) + operator--(int) noexcept { return fetch_sub(1); } __pointer_type - operator--(int) volatile + operator--(int) volatile noexcept { return fetch_sub(1); } __pointer_type - operator++() + operator++() noexcept { return fetch_add(1) + 1; } __pointer_type - operator++() volatile + operator++() volatile noexcept { return fetch_add(1) + 1; } __pointer_type - operator--() + operator--() noexcept { return fetch_sub(1) -1; } __pointer_type - operator--() volatile + operator--() volatile noexcept { return fetch_sub(1) -1; } __pointer_type - operator+=(ptrdiff_t __d) + operator+=(ptrdiff_t __d) noexcept { return fetch_add(__d) + __d; } __pointer_type - operator+=(ptrdiff_t __d) volatile + operator+=(ptrdiff_t __d) volatile noexcept { return fetch_add(__d) + __d; } __pointer_type - operator-=(ptrdiff_t __d) + operator-=(ptrdiff_t __d) noexcept { return fetch_sub(__d) - __d; } __pointer_type - operator-=(ptrdiff_t __d) volatile + operator-=(ptrdiff_t __d) volatile noexcept { return fetch_sub(__d) - __d; } bool - is_lock_free() const + is_lock_free() const noexcept { return true; } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return true; } void - store(__pointer_type __p, memory_order __m = memory_order_seq_cst) + store(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { __glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -553,7 +564,7 @@ namespace __atomic2 void store(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { 
__glibcxx_assert(__m != memory_order_acquire); __glibcxx_assert(__m != memory_order_acq_rel); @@ -571,7 +582,7 @@ namespace __atomic2 } __pointer_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -583,7 +594,7 @@ namespace __atomic2 } __pointer_type - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { __glibcxx_assert(__m != memory_order_release); __glibcxx_assert(__m != memory_order_acq_rel); @@ -595,7 +606,8 @@ namespace __atomic2 } __pointer_type - exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst) + exchange(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { // XXX built-in assumes memory_order_acquire. return __sync_lock_test_and_set(&_M_p, __p); @@ -604,7 +616,7 @@ namespace __atomic2 __pointer_type exchange(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { // XXX built-in assumes memory_order_acquire. return __sync_lock_test_and_set(&_M_p, __p); @@ -612,7 +624,8 @@ namespace __atomic2 bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) + memory_order __m1, + memory_order __m2) noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -628,7 +641,8 @@ namespace __atomic2 bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { __glibcxx_assert(__m2 != memory_order_release); __glibcxx_assert(__m2 != memory_order_acq_rel); @@ -643,21 +657,23 @@ namespace __atomic2 } __pointer_type - fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_add(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { return __sync_fetch_and_add(&_M_p, __d); } __pointer_type fetch_add(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return __sync_fetch_and_add(&_M_p, __d); } __pointer_type - fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_sub(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { return __sync_fetch_and_sub(&_M_p, __d); } __pointer_type fetch_sub(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return __sync_fetch_and_sub(&_M_p, __d); } }; diff --git a/libstdc++-v3/include/bits/atomic_base.h b/libstdc++-v3/include/bits/atomic_base.h index 272a4cd4cfd..ebb7d58ac72 100644 --- a/libstdc++-v3/include/bits/atomic_base.h +++ b/libstdc++-v3/include/bits/atomic_base.h @@ -59,7 +59,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION } memory_order; inline memory_order - __calculate_memory_order(memory_order __m) + __calculate_memory_order(memory_order __m) noexcept { const bool __cond1 = __m == memory_order_release; const bool __cond2 = __m == memory_order_acq_rel; @@ -69,15 +69,15 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION } void - atomic_thread_fence(memory_order); + atomic_thread_fence(memory_order) noexcept; void - atomic_signal_fence(memory_order); + atomic_signal_fence(memory_order) noexcept; /// kill_dependency template inline _Tp - kill_dependency(_Tp __y) + 
kill_dependency(_Tp __y) noexcept { _Tp __ret(__y); return __ret; diff --git a/libstdc++-v3/include/std/atomic b/libstdc++-v3/include/std/atomic index a19891dbdbc..b3fa7d8120b 100644 --- a/libstdc++-v3/include/std/atomic +++ b/libstdc++-v3/include/std/atomic @@ -59,92 +59,93 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION __atomic_base _M_base; public: - atomic_bool() = default; - ~atomic_bool() = default; + atomic_bool() noexcept = default; + ~atomic_bool() noexcept = default; atomic_bool(const atomic_bool&) = delete; atomic_bool& operator=(const atomic_bool&) = delete; atomic_bool& operator=(const atomic_bool&) volatile = delete; - constexpr atomic_bool(bool __i) : _M_base(__i) { } + constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { } bool - operator=(bool __i) + operator=(bool __i) noexcept { return _M_base.operator=(__i); } - operator bool() const + operator bool() const noexcept { return _M_base.load(); } - operator bool() const volatile + operator bool() const volatile noexcept { return _M_base.load(); } bool - is_lock_free() const { return _M_base.is_lock_free(); } + is_lock_free() const noexcept { return _M_base.is_lock_free(); } bool - is_lock_free() const volatile { return _M_base.is_lock_free(); } + is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); } void - store(bool __i, memory_order __m = memory_order_seq_cst) + store(bool __i, memory_order __m = memory_order_seq_cst) noexcept { _M_base.store(__i, __m); } void - store(bool __i, memory_order __m = memory_order_seq_cst) volatile + store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept { _M_base.store(__i, __m); } bool - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { return _M_base.load(__m); } bool - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { return _M_base.load(__m); } bool - exchange(bool __i, memory_order __m = memory_order_seq_cst) + exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept { return _M_base.exchange(__i, __m); } bool - exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile + exchange(bool __i, + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_base.exchange(__i, __m); } bool compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) + memory_order __m2) noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); } bool compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) volatile + memory_order __m2) volatile noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); } bool compare_exchange_weak(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m); } bool compare_exchange_weak(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_base.compare_exchange_weak(__i1, __i2, __m); } bool compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) + memory_order __m2) noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); } bool compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1, - memory_order __m2) volatile + memory_order __m2) volatile noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); } bool 
compare_exchange_strong(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m); } bool compare_exchange_strong(bool& __i1, bool __i2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_base.compare_exchange_strong(__i1, __i2, __m); } }; @@ -158,73 +159,77 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION _Tp _M_i; public: - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(_Tp __i) : _M_i(__i) { } + constexpr atomic(_Tp __i) noexcept : _M_i(__i) { } - operator _Tp() const; + operator _Tp() const noexcept; - operator _Tp() const volatile; + operator _Tp() const volatile noexcept; _Tp - operator=(_Tp __i) { store(__i); return __i; } + operator=(_Tp __i) noexcept { store(__i); return __i; } _Tp - operator=(_Tp __i) volatile { store(__i); return __i; } + operator=(_Tp __i) volatile noexcept { store(__i); return __i; } bool - is_lock_free() const; + is_lock_free() const noexcept; bool - is_lock_free() const volatile; + is_lock_free() const volatile noexcept; void - store(_Tp, memory_order = memory_order_seq_cst); + store(_Tp, memory_order = memory_order_seq_cst) noexcept; void - store(_Tp, memory_order = memory_order_seq_cst) volatile; + store(_Tp, memory_order = memory_order_seq_cst) volatile noexcept; _Tp - load(memory_order = memory_order_seq_cst) const; + load(memory_order = memory_order_seq_cst) const noexcept; _Tp - load(memory_order = memory_order_seq_cst) const volatile; + load(memory_order = memory_order_seq_cst) const volatile noexcept; _Tp - exchange(_Tp __i, memory_order = memory_order_seq_cst); + exchange(_Tp __i, memory_order = memory_order_seq_cst) noexcept; _Tp - exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile; + exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile noexcept; bool - compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order); + compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) noexcept; bool - compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile; + compare_exchange_weak(_Tp&, _Tp, memory_order, + memory_order) volatile noexcept; bool - compare_exchange_weak(_Tp&, _Tp, memory_order = memory_order_seq_cst); + compare_exchange_weak(_Tp&, _Tp, + memory_order = memory_order_seq_cst) noexcept; bool compare_exchange_weak(_Tp&, _Tp, - memory_order = memory_order_seq_cst) volatile; + memory_order = memory_order_seq_cst) volatile noexcept; bool - compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order); + compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) noexcept; bool - compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile; + compare_exchange_strong(_Tp&, _Tp, memory_order, + memory_order) volatile noexcept; bool - compare_exchange_strong(_Tp&, _Tp, memory_order = memory_order_seq_cst); + compare_exchange_strong(_Tp&, _Tp, + memory_order = memory_order_seq_cst) noexcept; bool compare_exchange_strong(_Tp&, _Tp, - memory_order = memory_order_seq_cst) volatile; + memory_order = memory_order_seq_cst) volatile noexcept; }; @@ -236,123 +241,126 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef __atomic_base<_Tp*> __base_type; __base_type _M_b; - atomic() = default; - ~atomic() = default; + atomic() noexcept = 
default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__pointer_type __p) : _M_b(__p) { } + constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { } - operator __pointer_type() const + operator __pointer_type() const noexcept { return __pointer_type(_M_b); } - operator __pointer_type() const volatile + operator __pointer_type() const volatile noexcept { return __pointer_type(_M_b); } __pointer_type - operator=(__pointer_type __p) + operator=(__pointer_type __p) noexcept { return _M_b.operator=(__p); } __pointer_type - operator=(__pointer_type __p) volatile + operator=(__pointer_type __p) volatile noexcept { return _M_b.operator=(__p); } __pointer_type - operator++(int) + operator++(int) noexcept { return _M_b++; } __pointer_type - operator++(int) volatile + operator++(int) volatile noexcept { return _M_b++; } __pointer_type - operator--(int) + operator--(int) noexcept { return _M_b--; } __pointer_type - operator--(int) volatile + operator--(int) volatile noexcept { return _M_b--; } __pointer_type - operator++() + operator++() noexcept { return ++_M_b; } __pointer_type - operator++() volatile + operator++() volatile noexcept { return ++_M_b; } __pointer_type - operator--() + operator--() noexcept { return --_M_b; } __pointer_type - operator--() volatile + operator--() volatile noexcept { return --_M_b; } __pointer_type - operator+=(ptrdiff_t __d) + operator+=(ptrdiff_t __d) noexcept { return _M_b.operator+=(__d); } __pointer_type - operator+=(ptrdiff_t __d) volatile + operator+=(ptrdiff_t __d) volatile noexcept { return _M_b.operator+=(__d); } __pointer_type - operator-=(ptrdiff_t __d) + operator-=(ptrdiff_t __d) noexcept { return _M_b.operator-=(__d); } __pointer_type - operator-=(ptrdiff_t __d) volatile + operator-=(ptrdiff_t __d) volatile noexcept { return _M_b.operator-=(__d); } bool - is_lock_free() const + is_lock_free() const noexcept { return _M_b.is_lock_free(); } bool - is_lock_free() const volatile + is_lock_free() const volatile noexcept { return _M_b.is_lock_free(); } void - store(__pointer_type __p, memory_order __m = memory_order_seq_cst) + store(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.store(__p, __m); } void store(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.store(__p, __m); } __pointer_type - load(memory_order __m = memory_order_seq_cst) const + load(memory_order __m = memory_order_seq_cst) const noexcept { return _M_b.load(__m); } __pointer_type - load(memory_order __m = memory_order_seq_cst) const volatile + load(memory_order __m = memory_order_seq_cst) const volatile noexcept { return _M_b.load(__m); } __pointer_type - exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst) + exchange(__pointer_type __p, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.exchange(__p, __m); } __pointer_type exchange(__pointer_type __p, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.exchange(__p, __m); } bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - 
memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return compare_exchange_weak(__p1, __p2, __m, __calculate_memory_order(__m)); @@ -360,7 +368,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return compare_exchange_weak(__p1, __p2, __m, __calculate_memory_order(__m)); @@ -368,17 +376,18 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) + memory_order __m1, memory_order __m2) noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m1, memory_order __m2) volatile + memory_order __m1, + memory_order __m2) volatile noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); } bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m, __calculate_memory_order(__m)); @@ -386,28 +395,30 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.compare_exchange_strong(__p1, __p2, __m, __calculate_memory_order(__m)); } __pointer_type - fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_add(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.fetch_add(__d, __m); } __pointer_type fetch_add(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.fetch_add(__d, __m); } __pointer_type - fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) + fetch_sub(ptrdiff_t __d, + memory_order __m = memory_order_seq_cst) noexcept { return _M_b.fetch_sub(__d, __m); } __pointer_type fetch_sub(ptrdiff_t __d, - memory_order __m = memory_order_seq_cst) volatile + memory_order __m = memory_order_seq_cst) volatile noexcept { return _M_b.fetch_sub(__d, __m); } }; @@ -419,13 +430,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef bool __integral_type; typedef atomic_bool __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -438,13 +449,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef char __integral_type; typedef atomic_char __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr 
atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -457,13 +468,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef signed char __integral_type; typedef atomic_schar __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept= default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -476,13 +487,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef unsigned char __integral_type; typedef atomic_uchar __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept= default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -495,13 +506,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef short __integral_type; typedef atomic_short __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -514,13 +525,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef unsigned short __integral_type; typedef atomic_ushort __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -533,13 +544,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef int __integral_type; typedef atomic_int __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -552,13 +563,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef unsigned int __integral_type; typedef atomic_uint __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using 
__base_type::operator __integral_type; using __base_type::operator=; @@ -571,13 +582,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef long __integral_type; typedef atomic_long __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -590,13 +601,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef unsigned long __integral_type; typedef atomic_ulong __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -609,13 +620,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef long long __integral_type; typedef atomic_llong __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -628,13 +639,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef unsigned long long __integral_type; typedef atomic_ullong __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -647,13 +658,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef wchar_t __integral_type; typedef atomic_wchar_t __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -666,13 +677,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION typedef char16_t __integral_type; typedef atomic_char16_t __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -685,13 +696,13 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION 
typedef char32_t __integral_type; typedef atomic_char32_t __base_type; - atomic() = default; - ~atomic() = default; + atomic() noexcept = default; + ~atomic() noexcept = default; atomic(const atomic&) = delete; atomic& operator=(const atomic&) = delete; atomic& operator=(const atomic&) volatile = delete; - constexpr atomic(__integral_type __i) : __base_type(__i) { } + constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { } using __base_type::operator __integral_type; using __base_type::operator=; @@ -700,104 +711,109 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION // Function definitions, atomic_flag operations. inline bool - atomic_flag_test_and_set_explicit(atomic_flag* __a, memory_order __m) + atomic_flag_test_and_set_explicit(atomic_flag* __a, + memory_order __m) noexcept { return __a->test_and_set(__m); } inline bool atomic_flag_test_and_set_explicit(volatile atomic_flag* __a, - memory_order __m) + memory_order __m) noexcept { return __a->test_and_set(__m); } inline void - atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) + atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept { __a->clear(__m); } inline void - atomic_flag_clear_explicit(volatile atomic_flag* __a, memory_order __m) + atomic_flag_clear_explicit(volatile atomic_flag* __a, + memory_order __m) noexcept { __a->clear(__m); } inline bool - atomic_flag_test_and_set(atomic_flag* __a) + atomic_flag_test_and_set(atomic_flag* __a) noexcept { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } inline bool - atomic_flag_test_and_set(volatile atomic_flag* __a) + atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); } inline void - atomic_flag_clear(atomic_flag* __a) + atomic_flag_clear(atomic_flag* __a) noexcept { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } inline void - atomic_flag_clear(volatile atomic_flag* __a) + atomic_flag_clear(volatile atomic_flag* __a) noexcept { atomic_flag_clear_explicit(__a, memory_order_seq_cst); } // Function templates generally applicable to atomic types. 
   template<typename _ITp>
     inline bool
-    atomic_is_lock_free(const atomic<_ITp>* __a)
+    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
     { return __a->is_lock_free(); }

   template<typename _ITp>
     inline bool
-    atomic_is_lock_free(const volatile atomic<_ITp>* __a)
+    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
     { return __a->is_lock_free(); }

   template<typename _ITp>
     inline void
-    atomic_init(atomic<_ITp>* __a, _ITp __i);
+    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

   template<typename _ITp>
     inline void
-    atomic_init(volatile atomic<_ITp>* __a, _ITp __i);
+    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

   template<typename _ITp>
     inline void
-    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i, memory_order __m)
+    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
+                          memory_order __m) noexcept
     { __a->store(__i, __m); }

   template<typename _ITp>
     inline void
     atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
-                          memory_order __m)
+                          memory_order __m) noexcept
     { __a->store(__i, __m); }

   template<typename _ITp>
     inline _ITp
-    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m)
+    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
     { return __a->load(__m); }

   template<typename _ITp>
     inline _ITp
     atomic_load_explicit(const volatile atomic<_ITp>* __a,
-                         memory_order __m)
+                         memory_order __m) noexcept
     { return __a->load(__m); }

   template<typename _ITp>
     inline _ITp
     atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
-                             memory_order __m)
+                             memory_order __m) noexcept
     { return __a->exchange(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
-                             memory_order __m)
+                             memory_order __m) noexcept
     { return __a->exchange(__i, __m); }

   template<typename _ITp>
     inline bool
     atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                           _ITp* __i1, _ITp __i2,
-                                          memory_order __m1, memory_order __m2)
+                                          memory_order __m1,
+                                          memory_order __m2) noexcept
     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

   template<typename _ITp>
     inline bool
     atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                           _ITp* __i1, _ITp __i2,
-                                          memory_order __m1, memory_order __m2)
+                                          memory_order __m1,
+                                          memory_order __m2) noexcept
     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

   template<typename _ITp>
@@ -805,7 +821,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                             _ITp* __i1, _ITp __i2,
                                             memory_order __m1,
-                                            memory_order __m2)
+                                            memory_order __m2) noexcept
     { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

   template<typename _ITp>
@@ -813,44 +829,44 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
     atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                             _ITp* __i1, _ITp __i2,
                                             memory_order __m1,
-                                            memory_order __m2)
+                                            memory_order __m2) noexcept
     { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

   template<typename _ITp>
     inline void
-    atomic_store(atomic<_ITp>* __a, _ITp __i)
+    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
     { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline void
-    atomic_store(volatile atomic<_ITp>* __a, _ITp __i)
+    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
     { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_load(const atomic<_ITp>* __a)
+    atomic_load(const atomic<_ITp>* __a) noexcept
     { return atomic_load_explicit(__a, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_load(const volatile atomic<_ITp>* __a)
+    atomic_load(const volatile atomic<_ITp>* __a) noexcept
     { return atomic_load_explicit(__a, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_exchange(atomic<_ITp>* __a, _ITp __i)
+    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
     { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i)
+    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
     { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline bool
     atomic_compare_exchange_weak(atomic<_ITp>* __a,
-                                 _ITp* __i1, _ITp __i2)
+                                 _ITp* __i1, _ITp __i2) noexcept
     {
       return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                    memory_order_seq_cst,
@@ -860,7 +876,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
   template<typename _ITp>
     inline bool
     atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
-                                 _ITp* __i1, _ITp __i2)
+                                 _ITp* __i1, _ITp __i2) noexcept
     {
       return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                    memory_order_seq_cst,
@@ -870,7 +886,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
   template<typename _ITp>
     inline bool
     atomic_compare_exchange_strong(atomic<_ITp>* __a,
-                                   _ITp* __i1, _ITp __i2)
+                                   _ITp* __i1, _ITp __i2) noexcept
     {
       return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                      memory_order_seq_cst,
@@ -880,7 +896,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
   template<typename _ITp>
     inline bool
     atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
-                                   _ITp* __i1, _ITp __i2)
+                                   _ITp* __i1, _ITp __i2) noexcept
     {
       return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                      memory_order_seq_cst,
@@ -894,111 +910,111 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
   template<typename _ITp>
     inline _ITp
     atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_add(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_add(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_sub(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_sub(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_and(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_and(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
-                             memory_order __m)
+                             memory_order __m) noexcept
     { return __a->fetch_or(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
-                             memory_order __m)
+                             memory_order __m) noexcept
     { return __a->fetch_or(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_xor(__i, __m); }

   template<typename _ITp>
     inline _ITp
     atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_xor(__i, __m); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

   template<typename _ITp>
     inline _ITp
-    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i)
+    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
     { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
@@ -1006,45 +1022,45 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
   template<typename _ITp>
     inline _ITp*
     atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_add(__d, __m); }

   template<typename _ITp>
     inline _ITp*
     atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_add(__d, __m); }

   template<typename _ITp>
     inline _ITp*
-    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d)
+    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
     { return __a->fetch_add(__d); }

   template<typename _ITp>
     inline _ITp*
-    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d)
+    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
     { return __a->fetch_add(__d); }

   template<typename _ITp>
     inline _ITp*
     atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
-                              ptrdiff_t __d, memory_order __m)
+                              ptrdiff_t __d, memory_order __m) noexcept
     { return __a->fetch_sub(__d, __m); }

   template<typename _ITp>
     inline _ITp*
     atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
-                              memory_order __m)
+                              memory_order __m) noexcept
     { return __a->fetch_sub(__d, __m); }

   template<typename _ITp>
     inline _ITp*
-    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d)
+    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
     { return __a->fetch_sub(__d); }

   template<typename _ITp>
     inline _ITp*
-    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d)
+    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
     { return __a->fetch_sub(__d); }

   // @} group atomics
diff --git a/libstdc++-v3/src/atomic.cc b/libstdc++-v3/src/atomic.cc
index e655dd3e8e6..5752d39feed 100644
--- a/libstdc++-v3/src/atomic.cc
+++ b/libstdc++-v3/src/atomic.cc
@@ -1,6 +1,6 @@
 // Support for atomic operations -*- C++ -*-

-// Copyright (C) 2008, 2009, 2010
+// Copyright (C) 2008, 2009, 2010, 2011
 // Free Software Foundation, Inc.
 //
 // This file is part of the GNU ISO C++ Library. This library is free
@@ -56,7 +56,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
 namespace __atomic0
 {
   bool
-  atomic_flag::test_and_set(memory_order)
+  atomic_flag::test_and_set(memory_order) noexcept
   {
 #if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1)
     lock_guard<mutex> __lock(get_atomic_mutex());
@@ -67,7 +67,7 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION
   }

   void
-  atomic_flag::clear(memory_order)
+  atomic_flag::clear(memory_order) noexcept
   {
 #if defined(_GLIBCXX_HAS_GTHREADS) && defined(_GLIBCXX_USE_C99_STDINT_TR1)
     lock_guard<mutex> __lock(get_atomic_mutex());