/// Enumeration for memory_order
+#if __cplusplus > 201703L
+ enum class memory_order : int
+ {
+ relaxed,
+ consume,
+ acquire,
+ release,
+ acq_rel,
+ seq_cst
+ };
+
+ inline constexpr memory_order memory_order_relaxed = memory_order::relaxed;
+ inline constexpr memory_order memory_order_consume = memory_order::consume;
+ inline constexpr memory_order memory_order_acquire = memory_order::acquire;
+ inline constexpr memory_order memory_order_release = memory_order::release;
+ inline constexpr memory_order memory_order_acq_rel = memory_order::acq_rel;
+ inline constexpr memory_order memory_order_seq_cst = memory_order::seq_cst;
+#else
typedef enum memory_order
{
memory_order_relaxed,
memory_order_consume,
memory_order_acquire,
memory_order_release,
memory_order_acq_rel,
memory_order_seq_cst
} memory_order;
+#endif
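Because the C++2a memory_order is a scoped enumeration, its values no longer convert implicitly to int, which is why every call into the __atomic_* built-ins below now spells int(__m) explicitly, and why the inline constexpr constants keep the old unscoped names valid in user code. A minimal sketch of the conversion difference, using assumed user code rather than anything in this header:

    // assumes #include <atomic>
    std::memory_order m = std::memory_order_acquire;
    // int i = m;      // fine for the old unscoped enum, ill-formed for enum class
    int i = int(m);    // the explicit conversion compiles under both definitions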
enum __memory_order_modifier
{
__memory_order_mask          = 0x0ffff,
__memory_order_modifier_mask = 0xffff0000,
__memory_order_hle_acquire   = 0x10000,
__memory_order_hle_release   = 0x20000
};
constexpr memory_order
operator|(memory_order __m, __memory_order_modifier __mod)
{
- return memory_order(__m | int(__mod));
+ return memory_order(int(__m) | int(__mod));
}
constexpr memory_order
operator&(memory_order __m, __memory_order_modifier __mod)
{
- return memory_order(__m & int(__mod));
+ return memory_order(int(__m) & int(__mod));
}
// Drop release ordering as per [atomics.types.operations.req]/21
constexpr memory_order
__cmpexch_failure_order(memory_order __m) noexcept
{
return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
- | (__m & __memory_order_modifier_mask));
+ | __memory_order_modifier(__m & __memory_order_modifier_mask));
}
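The comment above refers to [atomics.types.operations.req]/21: the failure ordering of a compare-exchange may not be memory_order_release or memory_order_acq_rel, so the failure order derived from a combined order has to be weakened. The actual downgrade is done by __cmpexch_failure_order2, which this excerpt does not show; a sketch of it, assuming the usual libstdc++ shape:

    constexpr memory_order
    __cmpexch_failure_order2(memory_order __m) noexcept
    {
      return __m == memory_order_acq_rel ? memory_order_acquire
        : __m == memory_order_release ? memory_order_relaxed : __m;
    }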
_GLIBCXX_ALWAYS_INLINE void
atomic_thread_fence(memory_order __m) noexcept
- { __atomic_thread_fence(__m); }
+ { __atomic_thread_fence(int(__m)); }
_GLIBCXX_ALWAYS_INLINE void
atomic_signal_fence(memory_order __m) noexcept
- { __atomic_signal_fence(__m); }
+ { __atomic_signal_fence(int(__m)); }
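Both fences simply forward the converted ordering to the corresponding built-in; __atomic_thread_fence orders memory operations against other threads, while __atomic_signal_fence only constrains the compiler with respect to a signal handler on the same thread. A small fence-based producer/consumer sketch (assumed user code):

    std::atomic<int>  data{0};
    std::atomic<bool> ready{false};

    // producer thread
    data.store(42, std::memory_order_relaxed);
    std::atomic_thread_fence(std::memory_order_release);
    ready.store(true, std::memory_order_relaxed);

    // consumer thread
    while (!ready.load(std::memory_order_relaxed)) { }
    std::atomic_thread_fence(std::memory_order_acquire);
    int value = data.load(std::memory_order_relaxed); // sees 42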
/// kill_dependency
template<typename _Tp>
_GLIBCXX_ALWAYS_INLINE bool
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
- return __atomic_test_and_set (&_M_i, __m);
+ return __atomic_test_and_set (&_M_i, int(__m));
}
_GLIBCXX_ALWAYS_INLINE bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
- return __atomic_test_and_set (&_M_i, __m);
+ return __atomic_test_and_set (&_M_i, int(__m));
}
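test_and_set returns the flag's previous value, which is all that is needed for the classic atomic_flag spin lock; a minimal sketch (assumed user code):

    std::atomic_flag busy = ATOMIC_FLAG_INIT;

    void enter()
    { while (busy.test_and_set(std::memory_order_acquire)) { /* spin */ } }

    void leave()
    { busy.clear(std::memory_order_release); }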
_GLIBCXX_ALWAYS_INLINE void
__glibcxx_assert(__b != memory_order_acquire);
__glibcxx_assert(__b != memory_order_acq_rel);
- __atomic_clear (&_M_i, __m);
+ __atomic_clear (&_M_i, int(__m));
}
_GLIBCXX_ALWAYS_INLINE void
__glibcxx_assert(__b != memory_order_acquire);
__glibcxx_assert(__b != memory_order_acq_rel);
- __atomic_clear (&_M_i, __m);
+ __atomic_clear (&_M_i, int(__m));
}
private:
__glibcxx_assert(__b != memory_order_acq_rel);
__glibcxx_assert(__b != memory_order_consume);
- __atomic_store_n(&_M_i, __i, __m);
+ __atomic_store_n(&_M_i, __i, int(__m));
}
_GLIBCXX_ALWAYS_INLINE void
__glibcxx_assert(__b != memory_order_acq_rel);
__glibcxx_assert(__b != memory_order_consume);
- __atomic_store_n(&_M_i, __i, __m);
+ __atomic_store_n(&_M_i, __i, int(__m));
}
_GLIBCXX_ALWAYS_INLINE __int_type
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
- return __atomic_load_n(&_M_i, __m);
+ return __atomic_load_n(&_M_i, int(__m));
}
_GLIBCXX_ALWAYS_INLINE __int_type
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
- return __atomic_load_n(&_M_i, __m);
+ return __atomic_load_n(&_M_i, int(__m));
}
_GLIBCXX_ALWAYS_INLINE __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
- return __atomic_exchange_n(&_M_i, __i, __m);
+ return __atomic_exchange_n(&_M_i, __i, int(__m));
}
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
- return __atomic_exchange_n(&_M_i, __i, __m);
+ return __atomic_exchange_n(&_M_i, __i, int(__m));
}
_GLIBCXX_ALWAYS_INLINE bool
__glibcxx_assert(__b2 != memory_order_acq_rel);
__glibcxx_assert(__b2 <= __b1);
- return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
+ int(__m1), int(__m2));
}
_GLIBCXX_ALWAYS_INLINE bool
__glibcxx_assert(__b2 != memory_order_acq_rel);
__glibcxx_assert(__b2 <= __b1);
- return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
+ int(__m1), int(__m2));
}
_GLIBCXX_ALWAYS_INLINE bool
__glibcxx_assert(__b2 != memory_order_acq_rel);
__glibcxx_assert(__b2 <= __b1);
- return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
+ int(__m1), int(__m2));
}
_GLIBCXX_ALWAYS_INLINE bool
__glibcxx_assert(__b2 != memory_order_acq_rel);
__glibcxx_assert(__b2 <= __b1);
- return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
+ int(__m1), int(__m2));
}
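The literal fourth argument to __atomic_compare_exchange_n selects the weak (1) or strong (0) variant: the weak form may fail spuriously even when the stored value equals the expected one, which is why it normally lives in a retry loop. An illustrative caller (assumed user code):

    std::atomic<int> counter{0};
    int expected = counter.load(std::memory_order_relaxed);
    // on failure, expected is refreshed with the current value and we retry
    while (!counter.compare_exchange_weak(expected, expected + 1,
                                          std::memory_order_acq_rel,
                                          std::memory_order_relaxed))
      { }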
_GLIBCXX_ALWAYS_INLINE bool
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
- { return __atomic_fetch_add(&_M_i, __i, __m); }
+ { return __atomic_fetch_add(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
- { return __atomic_fetch_add(&_M_i, __i, __m); }
+ { return __atomic_fetch_add(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
- { return __atomic_fetch_sub(&_M_i, __i, __m); }
+ { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
- { return __atomic_fetch_sub(&_M_i, __i, __m); }
+ { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
- { return __atomic_fetch_and(&_M_i, __i, __m); }
+ { return __atomic_fetch_and(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
- { return __atomic_fetch_and(&_M_i, __i, __m); }
+ { return __atomic_fetch_and(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
- { return __atomic_fetch_or(&_M_i, __i, __m); }
+ { return __atomic_fetch_or(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
- { return __atomic_fetch_or(&_M_i, __i, __m); }
+ { return __atomic_fetch_or(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
- { return __atomic_fetch_xor(&_M_i, __i, __m); }
+ { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
- { return __atomic_fetch_xor(&_M_i, __i, __m); }
+ { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }
};
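Every fetch_* member returns the value held before the operation, exactly as the underlying __atomic_fetch_* built-ins do. For example (assumed user code):

    std::atomic<unsigned> flags{0x1};
    unsigned old  = flags.fetch_or(0x4, std::memory_order_relaxed);  // old == 0x1, flags == 0x5
    unsigned prev = flags.fetch_add(1,  std::memory_order_relaxed);  // prev == 0x5, flags == 0x6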
__glibcxx_assert(__b != memory_order_acq_rel);
__glibcxx_assert(__b != memory_order_consume);
- __atomic_store_n(&_M_p, __p, __m);
+ __atomic_store_n(&_M_p, __p, int(__m));
}
_GLIBCXX_ALWAYS_INLINE void
__glibcxx_assert(__b != memory_order_acq_rel);
__glibcxx_assert(__b != memory_order_consume);
- __atomic_store_n(&_M_p, __p, __m);
+ __atomic_store_n(&_M_p, __p, int(__m));
}
_GLIBCXX_ALWAYS_INLINE __pointer_type
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
- return __atomic_load_n(&_M_p, __m);
+ return __atomic_load_n(&_M_p, int(__m));
}
_GLIBCXX_ALWAYS_INLINE __pointer_type
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
- return __atomic_load_n(&_M_p, __m);
+ return __atomic_load_n(&_M_p, int(__m));
}
_GLIBCXX_ALWAYS_INLINE __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
- return __atomic_exchange_n(&_M_p, __p, __m);
+ return __atomic_exchange_n(&_M_p, __p, int(__m));
}
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
- return __atomic_exchange_n(&_M_p, __p, __m);
+ return __atomic_exchange_n(&_M_p, __p, int(__m));
}
_GLIBCXX_ALWAYS_INLINE bool
__glibcxx_assert(__b2 != memory_order_acq_rel);
__glibcxx_assert(__b2 <= __b1);
- return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
+ return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
+ int(__m1), int(__m2));
}
_GLIBCXX_ALWAYS_INLINE bool
__glibcxx_assert(__b2 != memory_order_acq_rel);
__glibcxx_assert(__b2 <= __b1);
- return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
+ return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
+ int(__m1), int(__m2));
}
_GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
- { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
+ { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }
_GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
- { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
+ { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }
_GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
- { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
+ { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }
_GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
- { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
+ { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }
};
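In the pointer partial specialization, fetch_add and fetch_sub scale the ptrdiff_t argument by the pointee size through _M_type_size before passing it to the built-in, so user-visible arithmetic stays in elements, as with ordinary pointer arithmetic. For example (assumed user code):

    int buffer[8] = {};
    std::atomic<int*> p{buffer};
    int* old = p.fetch_add(2, std::memory_order_relaxed);
    // old == &buffer[0]; p now holds &buffer[2],
    // i.e. the raw address advanced by 2 * sizeof(int) bytes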
// @} group atomics