return __v == __GCC_ATOMIC_TEST_AND_SET_TRUEVAL;
}
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(bool __old,
memory_order __m = memory_order_seq_cst) const noexcept
{ std::__atomic_notify(&_M_i, true); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
#endif // C++20
_GLIBCXX_ALWAYS_INLINE void
__cmpexch_failure_order(__m));
}
-#if __cplusplus > 201703L
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#if __cplusplus > 201703L && defined _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(__int_type __old,
memory_order __m = memory_order_seq_cst) const noexcept
{ std::__atomic_notify(&_M_i, true); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
-#endif // C++2a
+#endif // C++20 && HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE __int_type
fetch_add(__int_type __i,
int(__m1), int(__m2));
}
-#if __cplusplus > 201703L
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#if __cplusplus > 201703L && defined _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(__pointer_type __old,
memory_order __m = memory_order_seq_cst) noexcept
{ std::__atomic_notify(&_M_p, true); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
-#endif // C++2a
+#endif // C++20 && HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_add(ptrdiff_t __d,
int(__success), int(__failure));
}
-#if __cplusplus > 201703L
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#if __cplusplus > 201703L && defined _GLIBCXX_HAVE_ATOMIC_WAIT
template<typename _Tp>
_GLIBCXX_ALWAYS_INLINE void
wait(const _Tp* __ptr, _Val<_Tp> __old,
{ std::__atomic_notify(__ptr, true); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
-#endif // C++2a
+#endif // C++20 && HAVE_ATOMIC_WAIT
template<typename _Tp>
_GLIBCXX_ALWAYS_INLINE _Tp
__cmpexch_failure_order(__order));
}
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(&_M_fp, __old, __m); }
{ __atomic_impl::notify_all(&_M_fp); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
value_type
fetch_add(value_type __i,
__cmpexch_failure_order(__order));
}
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
private:
_Tp* _M_ptr;
__cmpexch_failure_order(__order));
}
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
value_type
fetch_add(value_type __i,
__cmpexch_failure_order(__order));
}
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
value_type
fetch_add(value_type __i,
__cmpexch_failure_order(__order));
}
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE void
wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
_GLIBCXX_ALWAYS_INLINE value_type
fetch_add(difference_type __d,
#pragma GCC system_header
-#include <bits/c++config.h>
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
-#include <bits/functional_hash.h>
#include <bits/atomic_wait.h>
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
+#include <bits/functional_hash.h>
#include <chrono>
}
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
#endif // _GLIBCXX_ATOMIC_TIMED_WAIT_H
#pragma GCC system_header
#include <bits/c++config.h>
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#if defined _GLIBCXX_HAS_GTHREADS || defined _GLIBCXX_HAVE_LINUX_FUTEX
#include <bits/functional_hash.h>
#include <bits/gthr.h>
#include <ext/numeric_traits.h>
# include <bits/std_mutex.h> // std::mutex, std::__condvar
#endif
+// Other headers use this to check for the facilities defined in this header.
+#define _GLIBCXX_HAVE_ATOMIC_WAIT 1
namespace std _GLIBCXX_VISIBILITY(default)
{
#pragma GCC system_header
-#include <bits/c++config.h>
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
#include <bits/atomic_base.h>
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
#include <bits/atomic_timed_wait.h>
#include <ext/numeric_traits.h>
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
#endif // _GLIBCXX_SEMAPHORE_BASE_H
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
-#if __cplusplus > 201703L
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
- void wait(bool __old, memory_order __m = memory_order_seq_cst) const
- noexcept
+#if __cplusplus > 201703L && defined _GLIBCXX_HAVE_ATOMIC_WAIT
+ void
+ wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
{ _M_base.wait(__old, __m); }
// TODO add const volatile overload
- void notify_one() const noexcept
+ void
+ notify_one() const noexcept
{ _M_base.notify_one(); }
- void notify_all() const noexcept
+ void
+ notify_all() const noexcept
{ _M_base.notify_all(); }
-#endif // GTHREADS || LINUX_FUTEX
-#endif
+#endif // C++20 && HAVE_ATOMIC_WAIT
};
#if __cplusplus <= 201703L
{ return compare_exchange_strong(__e, __i, __m,
__cmpexch_failure_order(__m)); }
-#if __cplusplus > 201703L
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
- void wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
+#if __cplusplus > 201703L && defined _GLIBCXX_HAVE_ATOMIC_WAIT
+ void
+ wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{
std::__atomic_wait(&_M_i, __old,
[__m, this, __old]
// TODO add const volatile overload
- void notify_one() const noexcept
+ void
+ notify_one() const noexcept
{ std::__atomic_notify(&_M_i, false); }
- void notify_all() const noexcept
+ void
+ notify_all() const noexcept
{ std::__atomic_notify(&_M_i, true); }
-#endif // GTHREADS || LINUX_FUTEX
-#endif // C++20
+#endif // C++20 && HAVE_ATOMIC_WAIT
};
#undef _GLIBCXX20_INIT
__cmpexch_failure_order(__m));
}
-#if __cplusplus > 201703L
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
- void wait(__pointer_type __old,
- memory_order __m = memory_order_seq_cst) noexcept
+#if __cplusplus > 201703L && defined _GLIBCXX_HAVE_ATOMIC_WAIT
+ void
+ wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) noexcept
{ _M_b.wait(__old, __m); }
// TODO add const volatile overload
- void notify_one() const noexcept
+ void
+ notify_one() const noexcept
{ _M_b.notify_one(); }
- void notify_all() const noexcept
+ void
+ notify_all() const noexcept
{ _M_b.notify_all(); }
-#endif // GTHREADS || LINUX_FUTEX
-#endif
+#endif // C++20 && HAVE_ATOMIC_WAIT
__pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
}
-#if __cplusplus > 201703L
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#if __cplusplus > 201703L && defined _GLIBCXX_HAVE_ATOMIC_WAIT
template<typename _Tp>
inline void
atomic_wait(const atomic<_Tp>* __a,
inline void
atomic_notify_all(atomic<_Tp>* __a) noexcept
{ __a->notify_all(); }
-
-#endif // GTHREADS || LINUX_FUTEX
-#endif // C++2a
+#endif // C++20 && HAVE_ATOMIC_WAIT
// Function templates for atomic_integral and atomic_pointer operations only.
// Some operations (and, or, xor) are only available for atomic integrals,
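// Illustration (not part of the patch): a minimal sketch of the user-facing
// C++20 wait/notify interface that the members and free functions guarded
// above expose, assuming a target where _GLIBCXX_HAVE_ATOMIC_WAIT is defined.
#include <atomic>
#include <thread>

int main()
{
  std::atomic<int> flag{0};

  std::thread waiter([&flag] {
    flag.wait(0);        // blocks while the stored value equals the argument
  });

  flag.store(1);
  flag.notify_one();     // wakes a thread blocked in wait()

  waiter.join();
}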
#include <bits/atomic_base.h>
#include <ext/numeric_traits.h>
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
};
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
#endif // __cplusplus > 201703L
#endif // _GLIBCXX_LATCH
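// Illustration (not part of the patch): a minimal sketch of std::latch, the
// facility whose guard this hunk changes to _GLIBCXX_HAVE_ATOMIC_WAIT.
#include <latch>
#include <thread>
#include <vector>

int main()
{
  constexpr int n = 4;
  std::latch done(n);                  // count starts at n

  std::vector<std::thread> workers;
  for (int i = 0; i < n; ++i)
    workers.emplace_back([&done] {
      // ... per-thread work ...
      done.count_down();               // decrement once, never blocks
    });

  done.wait();                         // blocks until the count reaches zero

  for (auto& t : workers)
    t.join();
}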
*/
#ifndef _GLIBCXX_SEMAPHORE
-#define _GLIBCXX_SEMAPHORE
+#define _GLIBCXX_SEMAPHORE 1
#pragma GCC system_header
#if __cplusplus > 201703L
#include <bits/semaphore_base.h>
-#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+#ifdef _GLIBCXX_HAVE_ATOMIC_WAIT
#include <ext/numeric_traits.h>
namespace std _GLIBCXX_VISIBILITY(default)
using binary_semaphore = std::counting_semaphore<1>;
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace
-#endif // GTHREADS || LINUX_FUTEX
+#endif // HAVE_ATOMIC_WAIT
#endif // C++20
#endif // _GLIBCXX_SEMAPHORE
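// Illustration (not part of the patch): a minimal sketch of the
// counting_semaphore/binary_semaphore interface guarded above, again assuming
// _GLIBCXX_HAVE_ATOMIC_WAIT is defined for the target.
#include <semaphore>
#include <thread>

int main()
{
  std::binary_semaphore ready{0};      // binary_semaphore = counting_semaphore<1>

  std::thread producer([&ready] {
    // ... prepare data ...
    ready.release();                   // increment the count, wake one acquirer
  });

  ready.acquire();                     // blocks until the count is positive
  producer.join();
}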