return __v == __GCC_ATOMIC_TEST_AND_SET_TRUEVAL;
}
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(bool __old,
memory_order __m = memory_order_seq_cst) const noexcept
{ std::__atomic_notify(&_M_i, true); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
#endif // C++20
_GLIBCXX_ALWAYS_INLINE void
}
#if __cplusplus > 201703L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(__int_type __old,
memory_order __m = memory_order_seq_cst) const noexcept
{ std::__atomic_notify(&_M_i, true); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
#endif // C++2a
_GLIBCXX_ALWAYS_INLINE __int_type
}
#if __cplusplus > 201703L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(__pointer_type __old,
memory_order __m = memory_order_seq_cst) noexcept
{ std::__atomic_notify(&_M_p, true); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
#endif // C++2a
_GLIBCXX_ALWAYS_INLINE __pointer_type
}
#if __cplusplus > 201703L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
template<typename _Tp>
_GLIBCXX_ALWAYS_INLINE void
wait(const _Tp* __ptr, _Val<_Tp> __old,
{ std::__atomic_notify(__ptr, true); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
#endif // C++2a
template<typename _Tp>
__cmpexch_failure_order(__order));
}
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(&_M_fp, __old, __m); }
{ __atomic_impl::notify_all(&_M_fp); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
value_type
fetch_add(value_type __i,
__cmpexch_failure_order(__order));
}
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
private:
_Tp* _M_ptr;
__cmpexch_failure_order(__order));
}
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
value_type
fetch_add(value_type __i,
__cmpexch_failure_order(__order));
}
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(_Fp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
value_type
fetch_add(value_type __i,
__cmpexch_failure_order(__order));
}
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE void
wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{ __atomic_impl::wait(_M_ptr, __old, __m); }
{ __atomic_impl::notify_all(_M_ptr); }
// TODO add const volatile overload
+#endif // GTHREADS || LINUX_FUTEX
_GLIBCXX_ALWAYS_INLINE value_type
fetch_add(difference_type __d,
#pragma GCC system_header
#include <bits/c++config.h>
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
#include <bits/functional_hash.h>
#include <bits/atomic_wait.h>
}
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
-#endif
+#endif // GTHREADS || LINUX_FUTEX
+#endif // _GLIBCXX_ATOMIC_TIMED_WAIT_H
#pragma GCC system_header
#include <bits/c++config.h>
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
#include <bits/functional_hash.h>
#include <bits/gthr.h>
#include <bits/std_mutex.h>
}
}
- template<typename _Tp>
+ template<typename _Tp>
void
__platform_notify(const _Tp* __addr, bool __all) noexcept
{
bool
_M_waiting() const noexcept
- {
- __platform_wait_t __res;
- __atomic_load(&_M_wait, &__res, __ATOMIC_ACQUIRE);
- return __res;
- }
+ {
+ __platform_wait_t __res;
+ __atomic_load(&_M_wait, &__res, __ATOMIC_ACQUIRE);
+ return __res;
+ }
void
_M_notify(bool __all) noexcept
}
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
-#endif
+#endif // GTHREADS || LINUX_FUTEX
+#endif // _GLIBCXX_ATOMIC_WAIT_H
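
For orientation (illustrative only, not taken from the patch): the machinery guarded above in <bits/atomic_wait.h> is what backs the C++20 std::atomic<T>::wait / notify_one / notify_all members that the later hunks make conditional. A minimal usage sketch, assuming a configuration where gthreads or Linux futexes are available:

#include <atomic>
#include <thread>

int main()
{
  std::atomic<int> flag{0};

  std::thread waiter([&flag]
  {
    flag.wait(0);          // blocks while the stored value is still 0
  });

  flag.store(1, std::memory_order_release);
  flag.notify_one();       // wakes the waiter
  waiter.join();
}
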
#pragma GCC system_header
#include <bits/c++config.h>
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
#include <bits/atomic_base.h>
#include <bits/atomic_timed_wait.h>
noexcept
{ return _M_try_acquire_until(__clock_t::now() + __rtime); }
- private:
- sem_t _M_semaphore;
- };
+ private:
+ sem_t _M_semaphore;
+ };
#endif // _GLIBCXX_HAVE_POSIX_SEMAPHORE
- template<typename _Tp>
- struct __atomic_semaphore
+ template<typename _Tp>
+ struct __atomic_semaphore
+ {
+ static_assert(std::is_integral_v<_Tp>);
+ static_assert(__gnu_cxx::__int_traits<_Tp>::__max
+ <= __gnu_cxx::__int_traits<ptrdiff_t>::__max);
+ static constexpr ptrdiff_t _S_max = __gnu_cxx::__int_traits<_Tp>::__max;
+
+ explicit __atomic_semaphore(_Tp __count) noexcept
+ : _M_counter(__count)
{
- static_assert(std::is_integral_v<_Tp>);
- static_assert(__gnu_cxx::__int_traits<_Tp>::__max
- <= __gnu_cxx::__int_traits<ptrdiff_t>::__max);
- static constexpr ptrdiff_t _S_max = __gnu_cxx::__int_traits<_Tp>::__max;
+ __glibcxx_assert(__count >= 0 && __count <= _S_max);
+ }
- explicit __atomic_semaphore(_Tp __count) noexcept
- : _M_counter(__count)
- {
- __glibcxx_assert(__count >= 0 && __count <= _S_max);
- }
+ __atomic_semaphore(const __atomic_semaphore&) = delete;
+ __atomic_semaphore& operator=(const __atomic_semaphore&) = delete;
- __atomic_semaphore(const __atomic_semaphore&) = delete;
- __atomic_semaphore& operator=(const __atomic_semaphore&) = delete;
+ _GLIBCXX_ALWAYS_INLINE void
+ _M_acquire() noexcept
+ {
+ auto const __pred = [this]
+ {
+ auto __old = __atomic_impl::load(&this->_M_counter,
+ memory_order::acquire);
+ if (__old == 0)
+ return false;
+ return __atomic_impl::compare_exchange_strong(&this->_M_counter,
+ __old, __old - 1,
+ memory_order::acquire,
+ memory_order::release);
+ };
+ auto __old = __atomic_impl::load(&_M_counter, memory_order_relaxed);
+ std::__atomic_wait(&_M_counter, __old, __pred);
+ }
+
+ bool
+ _M_try_acquire() noexcept
+ {
+ auto __old = __atomic_impl::load(&_M_counter, memory_order::acquire);
+ auto const __pred = [this, __old]
+ {
+ if (__old == 0)
+ return false;
+
+ auto __prev = __old;
+ return __atomic_impl::compare_exchange_weak(&this->_M_counter,
+ __prev, __prev - 1,
+ memory_order::acquire,
+ memory_order::release);
+ };
+ return std::__atomic_spin(__pred);
+ }
- _GLIBCXX_ALWAYS_INLINE void
- _M_acquire() noexcept
+ template<typename _Clock, typename _Duration>
+ _GLIBCXX_ALWAYS_INLINE bool
+ _M_try_acquire_until(const chrono::time_point<_Clock,
+ _Duration>& __atime) noexcept
{
auto const __pred = [this]
{
if (__old == 0)
return false;
return __atomic_impl::compare_exchange_strong(&this->_M_counter,
- __old, __old - 1,
- memory_order::acquire,
- memory_order::release);
- };
- auto __old = __atomic_impl::load(&_M_counter, memory_order_relaxed);
- std::__atomic_wait(&_M_counter, __old, __pred);
- }
-
- bool
- _M_try_acquire() noexcept
- {
- auto __old = __atomic_impl::load(&_M_counter, memory_order::acquire);
- auto const __pred = [this, __old]
- {
- if (__old == 0)
- return false;
-
- auto __prev = __old;
- return __atomic_impl::compare_exchange_weak(&this->_M_counter,
- __prev, __prev - 1,
- memory_order::acquire,
- memory_order::release);
+ __old, __old - 1,
+ memory_order::acquire,
+ memory_order::release);
};
- return std::__atomic_spin(__pred);
- }
- template<typename _Clock, typename _Duration>
- _GLIBCXX_ALWAYS_INLINE bool
- _M_try_acquire_until(const chrono::time_point<_Clock,
- _Duration>& __atime) noexcept
- {
- auto const __pred = [this]
- {
- auto __old = __atomic_impl::load(&this->_M_counter,
- memory_order::acquire);
- if (__old == 0)
- return false;
- return __atomic_impl::compare_exchange_strong(&this->_M_counter,
- __old, __old - 1,
- memory_order::acquire,
- memory_order::release);
- };
-
- auto __old = __atomic_impl::load(&_M_counter, memory_order_relaxed);
- return __atomic_wait_until(&_M_counter, __old, __pred, __atime);
+ auto __old = __atomic_impl::load(&_M_counter, memory_order_relaxed);
+ return __atomic_wait_until(&_M_counter, __old, __pred, __atime);
}
template<typename _Rep, typename _Period>
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
-#endif
+#endif // GTHREADS || LINUX_FUTEX
+#endif // _GLIBCXX_SEMAPHORE_BASE_H
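
For context (illustrative only, not part of the patch): the __atomic_semaphore code re-indented above implements acquire as a load / check-for-zero / compare-exchange loop layered on the atomic wait primitive. A self-contained sketch of the same pattern in portable C++20, using hypothetical names (SketchSemaphore, count_) and illustrative memory orders rather than the library's internals:

#include <atomic>
#include <cstddef>

class SketchSemaphore                 // hypothetical, not libstdc++'s type
{
  std::atomic<std::ptrdiff_t> count_;

public:
  explicit SketchSemaphore(std::ptrdiff_t initial) : count_(initial) { }

  void acquire()
  {
    auto old = count_.load(std::memory_order_acquire);
    for (;;)
      {
        if (old == 0)
          {
            count_.wait(0);           // block until the count changes
            old = count_.load(std::memory_order_acquire);
            continue;
          }
        // Try to decrement; on failure the CAS refreshes 'old'.
        if (count_.compare_exchange_weak(old, old - 1,
                                         std::memory_order_acquire,
                                         std::memory_order_relaxed))
          return;
      }
  }

  void release(std::ptrdiff_t update = 1)
  {
    count_.fetch_add(update, std::memory_order_release);
    if (update > 1)
      count_.notify_all();
    else
      count_.notify_one();
  }
};
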
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
#if __cplusplus > 201703L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
void wait(bool __old, memory_order __m = memory_order_seq_cst) const
noexcept
{ _M_base.wait(__old, __m); }
void notify_all() const noexcept
{ _M_base.notify_all(); }
+#endif // GTHREADS || LINUX_FUTEX
#endif
};
__cmpexch_failure_order(__m)); }
#if __cplusplus > 201703L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
void wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
{
std::__atomic_wait(&_M_i, __old,
void notify_all() const noexcept
{ std::__atomic_notify(&_M_i, true); }
-#endif
+#endif // GTHREADS || LINUX_FUTEX
+#endif // C++20
};
#undef _GLIBCXX20_INIT
}
#if __cplusplus > 201703L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
void wait(__pointer_type __old,
memory_order __m = memory_order_seq_cst) noexcept
{ _M_b.wait(__old, __m); }
void notify_all() const noexcept
{ _M_b.notify_all(); }
+#endif // GTHREADS || LINUX_FUTEX
#endif
__pointer_type
fetch_add(ptrdiff_t __d,
#if __cplusplus > 201703L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
template<typename _Tp>
inline void
atomic_wait(const atomic<_Tp>* __a,
atomic_notify_all(atomic<_Tp>* __a) noexcept
{ __a->notify_all(); }
+#endif // GTHREADS || LINUX_FUTEX
#endif // C++2a
// Function templates for atomic_integral and atomic_pointer operations only.
#pragma GCC system_header
#if __cplusplus > 201703L
-#define __cpp_lib_semaphore 201907L
#include <bits/semaphore_base.h>
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
#include <ext/numeric_traits.h>
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
+#define __cpp_lib_semaphore 201907L
+
template<ptrdiff_t __least_max_value =
__gnu_cxx::__int_traits<ptrdiff_t>::__max>
class counting_semaphore
using binary_semaphore = std::counting_semaphore<1>;
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace
-#endif // __cplusplus > 201703L
+#endif // GTHREADS || LINUX_FUTEX
+#endif // C++20
#endif // _GLIBCXX_SEMAPHORE
#if __cpp_lib_concepts
# define __cpp_lib_ranges 201911L
#endif
-#define __cpp_lib_semaphore 201907L
+#if defined _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
+# define __cpp_lib_semaphore 201907L
+#endif
#define __cpp_lib_shift 201806L
#if __cpp_lib_concepts
# define __cpp_lib_span 202002L
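
Usage note (illustrative, not part of the patch): because __cpp_lib_semaphore is now defined only when gthreads or Linux futexes are available, portable code should test the feature-test macro before relying on std::counting_semaphore / std::binary_semaphore, e.g.:

#include <version>

#if defined __cpp_lib_semaphore
# include <semaphore>
# include <thread>

std::binary_semaphore ready{0};       // hypothetical example object

int main()
{
  std::thread producer([] { ready.release(); });   // signal
  ready.acquire();                                 // wait for the signal
  producer.join();
}
#else
// No semaphore support in this configuration: fall back to std::mutex
// and std::condition_variable.
int main() { }
#endif
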