[libc++] Refactor atomic_wait using lambdas (#115746)

Now that we've dropped support for older C++ dialects in the
synchronization library, we can use lambdas to clarify some of the code
used to implement atomic_wait.
Author: Louis Dionne
Date:   2024-11-27 14:49:57 -05:00 (committed by GitHub)
Parent: c60b055d46
Commit: d681e1030f
3 changed files with 33 additions and 37 deletions
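The pattern being removed is the usual pre-lambda workaround: a hand-written function object that carries its "captures" as data members, which the header needed while it still had to compile in dialects without lambdas. A minimal standalone sketch of the before/after shape (poll_impl and wait_unless are illustrative names, not the libc++ internals):

#include <atomic>
#include <cstdio>

// Before: captured state spelled out as a hand-written function object
// (members plus operator()), usable even where lambdas are unavailable.
template <class Atomic, class Poll>
struct poll_impl {
  const Atomic& a_;
  Poll poll_;
  std::memory_order order_;

  bool operator()() const { return poll_(a_.load(order_)); }
};

// After: a lambda captures the same state in place and is handed straight
// to the polling loop, so the named struct disappears.
template <class Atomic, class Poll>
void wait_unless(const Atomic& a, std::memory_order order, Poll&& poll) {
  auto do_poll = [&] { return poll(a.load(order)); };
  while (!do_poll()) {
    // spin, back off, or block on a platform wait primitive here
  }
}

int main() {
  std::atomic<int> flag{1};
  // Returns immediately because the predicate is already satisfied.
  wait_unless(flag, std::memory_order_acquire, [](int v) { return v != 0; });
  std::puts("done");
}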

libcxx/include/__atomic/atomic_sync.h

@@ -57,19 +57,8 @@ struct __atomic_waitable< _Tp,
                          decltype(__atomic_waitable_traits<__decay_t<_Tp> >::__atomic_contention_address(
                              std::declval<const _Tp&>()))> > : true_type {};
 
-template <class _AtomicWaitable, class _Poll>
-struct __atomic_wait_poll_impl {
-  const _AtomicWaitable& __a_;
-  _Poll __poll_;
-  memory_order __order_;
-
-  _LIBCPP_HIDE_FROM_ABI bool operator()() const {
-    auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a_, __order_);
-    return __poll_(__current_val);
-  }
-};
-
-#if _LIBCPP_HAS_THREADS
+#if _LIBCPP_STD_VER >= 20
+
+#  if _LIBCPP_HAS_THREADS
 
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*) _NOEXCEPT;
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*) _NOEXCEPT;
@@ -144,11 +133,16 @@ struct __atomic_wait_backoff_impl {
 // value. The predicate function must not return `false` spuriously.
 template <class _AtomicWaitable, class _Poll>
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
-__atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
+__atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
-  __atomic_wait_poll_impl<_AtomicWaitable, __decay_t<_Poll> > __poll_impl = {__a, __poll, __order};
   __atomic_wait_backoff_impl<_AtomicWaitable, __decay_t<_Poll> > __backoff_fn = {__a, __poll, __order};
-  std::__libcpp_thread_poll_with_backoff(__poll_impl, __backoff_fn);
+  std::__libcpp_thread_poll_with_backoff(
+      /* poll */
+      [&]() {
+        auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a, __order);
+        return __poll(__current_val);
+      },
+      /* backoff */ __backoff_fn);
 }
 
 template <class _AtomicWaitable>
@@ -163,12 +157,17 @@ _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _
   std::__cxx_atomic_notify_all(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
 }
 
-#else // _LIBCPP_HAS_THREADS
+#  else // _LIBCPP_HAS_THREADS
 
 template <class _AtomicWaitable, class _Poll>
-_LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __order) {
-  __atomic_wait_poll_impl<_AtomicWaitable, __decay_t<_Poll> > __poll_fn = {__a, __poll, __order};
-  std::__libcpp_thread_poll_with_backoff(__poll_fn, __spinning_backoff_policy());
+_LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
+  std::__libcpp_thread_poll_with_backoff(
+      /* poll */
+      [&]() {
+        auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a, __order);
+        return __poll(__current_val);
+      },
+      /* backoff */ __spinning_backoff_policy());
 }
 
 template <class _AtomicWaitable>
@@ -177,29 +176,24 @@ _LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable&) {}
 template <class _AtomicWaitable>
 _LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable&) {}
 
-#endif // _LIBCPP_HAS_THREADS
+#  endif // _LIBCPP_HAS_THREADS
 
 template <typename _Tp>
 _LIBCPP_HIDE_FROM_ABI bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
   return std::memcmp(std::addressof(__lhs), std::addressof(__rhs), sizeof(_Tp)) == 0;
 }
 
-template <class _Tp>
-struct __atomic_compare_unequal_to {
-  _Tp __val_;
-
-  _LIBCPP_HIDE_FROM_ABI bool operator()(const _Tp& __arg) const {
-    return !std::__cxx_nonatomic_compare_equal(__arg, __val_);
-  }
-};
-
-template <class _AtomicWaitable, class _Up>
+template <class _AtomicWaitable, class _Tp>
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
-__atomic_wait(_AtomicWaitable& __a, _Up __val, memory_order __order) {
+__atomic_wait(_AtomicWaitable& __a, _Tp __val, memory_order __order) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
-  __atomic_compare_unequal_to<_Up> __nonatomic_equal = {__val};
-  std::__atomic_wait_unless(__a, __nonatomic_equal, __order);
+  std::__atomic_wait_unless(__a, __order, [&](_Tp const& __current) {
+    return !std::__cxx_nonatomic_compare_equal(__current, __val);
+  });
 }
 
+#endif // C++20
+
 _LIBCPP_END_NAMESPACE_STD
 
 #endif // _LIBCPP___ATOMIC_ATOMIC_SYNC_H
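Beyond the lambda conversion, __atomic_wait_unless now takes the memory order before the predicate. With the predicate last, call sites can pass the lambda as the trailing argument, which is how the latch and semaphore changes below are written. A rough sketch of the two call shapes, reusing the illustrative wait_unless from the sketch above (not the libc++ signature):

#include <atomic>

// Illustrative stand-in for the internal helper; not the libc++ symbol.
template <class Atomic, class Poll>
void wait_unless(const Atomic& a, std::memory_order order, Poll&& poll) {
  while (!poll(a.load(order))) { /* back off between polls */ }
}

int main() {
  std::atomic<int> ready{1};
  // Old parameter order: the lambda sits between the waitable and the order.
  //   wait_unless(ready, [](int v) { return v != 0; }, std::memory_order_acquire);
  // New parameter order: the lambda trails the call, so multi-line predicates
  // format naturally as a block after the other arguments.
  wait_unless(ready, std::memory_order_acquire, [](int v) { return v != 0; });
}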

libcxx/include/latch

@@ -99,8 +99,9 @@ public:
     return try_wait_impl(__value);
   }
 
   inline _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void wait() const {
-    std::__atomic_wait_unless(
-        __a_, [this](ptrdiff_t& __value) -> bool { return try_wait_impl(__value); }, memory_order_acquire);
+    std::__atomic_wait_unless(__a_, memory_order_acquire, [this](ptrdiff_t& __value) -> bool {
+      return try_wait_impl(__value);
+    });
   }
 
   inline _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void arrive_and_wait(ptrdiff_t __update = 1) {
     _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(__update >= 0, "latch::arrive_and_wait called with a negative value");

libcxx/include/semaphore

@@ -96,8 +96,9 @@ public:
     }
   }
 
   _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void acquire() {
-    std::__atomic_wait_unless(
-        __a_, [this](ptrdiff_t& __old) { return __try_acquire_impl(__old); }, memory_order_relaxed);
+    std::__atomic_wait_unless(__a_, memory_order_relaxed, [this](ptrdiff_t& __old) {
+      return __try_acquire_impl(__old);
+    });
   }
 
   template <class _Rep, class _Period>
   _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI bool