#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit
    _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };
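  // Note: _Sp_locker is the locking strategy behind the non-lock-free free
  // functions below. Its constructors hash the shared_ptr address(es) to
  // pick one or two mutexes from a small pool that lives inside the
  // library, and its destructor releases them; the two-pointer form locks
  // both mutexes in a fixed order so compare-exchange can lock two objects
  // without deadlock. (Descriptive summary only; the definitions are in
  // the compiled library, not in this header.)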
  /**
   *  @brief  Report whether shared_ptr atomic operations are lock-free.
   */
  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>* __p)
    {
#ifdef __GTHREADS
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }

  template<typename _Tp>
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
  /**
   *  @brief  Atomic load for shared_ptr objects.
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  /**
   *  @brief  Atomic store for shared_ptr objects.
   */
  template<typename _Tp>
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp>
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
                          __shared_ptr<_Tp, _Lp> __r,
                          memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p not destroyed while lock held
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
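  // A usage sketch for the load/store free functions above (not part of
  // this header; `Config` and `g_config` are hypothetical names). C++20
  // deprecates these overloads in favour of atomic<shared_ptr<T>>,
  // defined later in this file.
  //
  // @code
  // std::shared_ptr<const Config> g_config;
  //
  // void update(std::shared_ptr<const Config> fresh)
  // { std::atomic_store(&g_config, std::move(fresh)); }
  //
  // std::shared_ptr<const Config> snapshot()
  // { return std::atomic_load(&g_config); } // owning copy, safe to use
  // @endcode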
  /**
   *  @brief  Atomic exchange for shared_ptr objects.
   */
  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp>
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
                             __shared_ptr<_Tp, _Lp> __r,
                             memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
                                           memory_order_seq_cst);
    }
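  // Usage sketch: atomic_exchange is handy for "take and reset" handovers
  // (continuing the hypothetical `g_config` example above):
  //
  // @code
  // // Detach the current object and leave an empty pointer behind.
  // std::shared_ptr<const Config> old =
  //   std::atomic_exchange(&g_config, std::shared_ptr<const Config>{});
  // @endcode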
  /**
   *  @brief  Atomic compare-and-swap for shared_ptr objects.
   */
  template<typename _Tp>
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
                                            shared_ptr<_Tp>* __v,
                                            shared_ptr<_Tp> __w,
                                            memory_order,
                                            memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
                                          shared_ptr<_Tp>* __v,
                                          shared_ptr<_Tp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp>
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
                                 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp, _Lock_policy _Lp>
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                            __shared_ptr<_Tp, _Lp>* __v,
                                            __shared_ptr<_Tp, _Lp> __w,
                                            memory_order,
                                            memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
        {
          __x = std::move(*__p);
          *__p = std::move(__w);
          return true;
        }
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
                                   __shared_ptr<_Tp, _Lp>* __v,
                                   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
                                          __shared_ptr<_Tp, _Lp>* __v,
                                          __shared_ptr<_Tp, _Lp> __w,
                                          memory_order __success,
                                          memory_order __failure)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
          std::move(__w), __success, __failure);
    }

  template<typename _Tp, _Lock_policy _Lp>
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
                                 __shared_ptr<_Tp, _Lp>* __v,
                                 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
          std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
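  // The compare-exchange functions above compare both the stored pointer
  // value and the ownership (the control block, via owner_less), which is
  // what makes a classic CAS publication loop work. A sketch, with
  // hypothetical `Node` and `head` (not part of this header):
  //
  // @code
  // struct Node { int value; std::shared_ptr<Node> next; };
  // std::shared_ptr<Node> head;
  //
  // void push(int v)
  // {
  //   auto n = std::make_shared<Node>();
  //   n->value = v;
  //   n->next = std::atomic_load(&head);
  //   // On failure n->next is updated to the current head, so just retry.
  //   while (!std::atomic_compare_exchange_weak(&head, &n->next, n))
  //     { }
  // }
  // @endcode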
#if __cplusplus >= 202002L
# define __cpp_lib_atomic_shared_ptr 201711L
  template<typename _Tp>
    class atomic;

  template<typename _Up>
    static constexpr bool __is_shared_ptr = false;
  template<typename _Up>
    static constexpr bool __is_shared_ptr<shared_ptr<_Up>> = true;
  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;

      friend class atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base<>* but uses the LSB as a lock.
      struct _Atomic_count
      {
        // Either __shared_count<> or __weak_count<>
        using __count_type = decltype(_Tp::_M_refcount);

        // _Sp_counted_base<>*
        using pointer = decltype(__count_type::_M_pi);

        // Ensure we can use the LSB as the lock bit.
        static_assert(alignof(remove_pointer_t<pointer>) > 1);
        constexpr _Atomic_count() noexcept = default;

        explicit
        _Atomic_count(__count_type&& __c) noexcept
        : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
        { __c._M_pi = nullptr; }

        ~_Atomic_count()
        {
          auto __val = _M_val.load(memory_order_relaxed);
          __glibcxx_assert(!(__val & _S_lock_bit));
          if (auto __pi = reinterpret_cast<pointer>(__val))
            {
              if constexpr (__is_shared_ptr<_Tp>)
                __pi->_M_release();
              else
                __pi->_M_weak_release();
            }
        }

        _Atomic_count(const _Atomic_count&) = delete;
        _Atomic_count& operator=(const _Atomic_count&) = delete;
        // Precondition: caller does not hold the lock.
        // Returns the raw pointer value without the lock bit set.
        pointer
        lock(memory_order __o) const noexcept
        {
          // To acquire the lock we flip the LSB from 0 to 1.

          auto __current = _M_val.load(memory_order_relaxed);
          while (__current & _S_lock_bit)
            {
#if __cpp_lib_atomic_wait
              __detail::__thread_relax();
#endif
              __current = _M_val.load(memory_order_relaxed);
            }

          while (!_M_val.compare_exchange_strong(__current,
                                                 __current | _S_lock_bit,
                                                 __o,
                                                 memory_order_relaxed))
            {
#if __cpp_lib_atomic_wait
              __detail::__thread_relax();
#endif
              __current = __current & ~_S_lock_bit;
            }
          return reinterpret_cast<pointer>(__current);
        }

        // Precondition: caller holds the lock.
        void
        unlock(memory_order __o) const noexcept
        { _M_val.fetch_sub(1, __o); }
        // Swaps the values of *this and __c, and unlocks *this.
        // Precondition: caller holds the lock.
        void
        _M_swap_unlock(__count_type& __c, memory_order __o) noexcept
        {
          if (__o != memory_order_seq_cst)
            __o = memory_order_release;
          auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
          __x = _M_val.exchange(__x, __o);
          __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
        }
#if __cpp_lib_atomic_wait
        // Unlocks *this, then waits for a change of the stored pointer.
        // Precondition: caller holds the lock.
        void
        _M_wait_unlock(memory_order __o) const noexcept
        {
          auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
          _M_val.wait(__v & ~_S_lock_bit, __o);
        }

        void
        notify_one() noexcept
        { _M_val.notify_one(); }

        void
        notify_all() noexcept
        { _M_val.notify_all(); }
#endif

      private:
        mutable __atomic_base<uintptr_t> _M_val{0};
        static constexpr uintptr_t _S_lock_bit{1};
      };
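      // The lock/unlock protocol above is a pointer-tagging spinlock: the
      // count pointer is stored in a uintptr_t whose (always-zero, thanks
      // to the static_assert on alignment) least significant bit doubles
      // as the lock flag. A minimal standalone sketch of the same
      // technique, with hypothetical names, not part of this header:
      //
      // @code
      // #include <atomic>
      // #include <cstdint>
      //
      // struct TaggedLock
      // {
      //   std::atomic<std::uintptr_t> val{0}; // pointer bits | lock bit
      //
      //   void* lock() noexcept
      //   {
      //     auto cur = val.load(std::memory_order_relaxed);
      //     for (;;)
      //       {
      //         cur &= ~std::uintptr_t(1); // expect the unlocked value
      //         if (val.compare_exchange_weak(cur, cur | 1,
      //                                       std::memory_order_acquire,
      //                                       std::memory_order_relaxed))
      //           return reinterpret_cast<void*>(cur);
      //       }
      //   }
      //
      //   void unlock() noexcept
      //   { val.fetch_sub(1, std::memory_order_release); }
      // };
      // @endcode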
      typename _Tp::element_type* _M_ptr = nullptr;
      _Atomic_count _M_refcount;

      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
        if (__p)
          {
            if constexpr (__is_shared_ptr<_Tp>)
              __p->_M_add_ref_copy();
            else
              __p->_M_weak_add_ref();
          }
        return __p;
      }
    public:
      constexpr _Sp_atomic() noexcept = default;

      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;
      value_type
      load(memory_order __o) const noexcept
      {
        __glibcxx_assert(__o != memory_order_release
                           && __o != memory_order_acq_rel);
        // Ensure that the correct value of _M_ptr is visible after locking,
        // by upgrading relaxed or consume to acquire.
        if (__o != memory_order_seq_cst)
          __o = memory_order_acquire;

        value_type __ret;
        auto __pi = _M_refcount.lock(__o);
        __ret._M_ptr = _M_ptr;
        __ret._M_refcount._M_pi = _S_add_ref(__pi);
        _M_refcount.unlock(memory_order_relaxed);
        return __ret;
      }
      void
      swap(value_type& __r, memory_order __o) noexcept
      {
        _M_refcount.lock(memory_order_acquire);
        std::swap(_M_ptr, __r._M_ptr);
        _M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o, memory_order __o2) noexcept
      {
        bool __result = true;
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __expected._M_ptr
              && __pi == __expected._M_refcount._M_pi)
          {
            _M_ptr = __desired._M_ptr;
            _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
          }
        else
          {
            // Handle the old value of __expected here, so that it is
            // destroyed after the lock has been released below.
            _Tp __sink = std::move(__expected);
            __expected._M_ptr = _M_ptr;
            __expected._M_refcount._M_pi = _S_add_ref(__pi);
            _M_refcount.unlock(__o2);
            __result = false;
          }
        return __result;
      }
#if __cpp_lib_atomic_wait
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
        auto __pi = _M_refcount.lock(memory_order_acquire);
        if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
          _M_refcount._M_wait_unlock(__o);
        else
          _M_refcount.unlock(memory_order_relaxed);
      }

      void
      notify_one() noexcept
      { _M_refcount.notify_one(); }

      void
      notify_all() noexcept
      { _M_refcount.notify_all(); }
#endif
    };
  template<typename _Tp>
    class atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      constexpr atomic(nullptr_t) noexcept : atomic() { }

      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(shared_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }

      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
                              shared_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      { return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2); }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o = memory_order_seq_cst) noexcept
      {
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }

#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif

    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
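  // C++20 usage sketch for the specialization above; `Widget` and the
  // variable names are hypothetical, not part of this header:
  //
  // @code
  // std::atomic<std::shared_ptr<Widget>> current;
  //
  // void publish(std::shared_ptr<Widget> w)
  // { current.store(std::move(w)); }      // seq_cst by default
  //
  // std::shared_ptr<Widget> acquire()
  // { return current.load(); }            // returns an owning copy
  //
  // // Replace only if nobody else changed it first:
  // bool replace(std::shared_ptr<Widget> next)
  // {
  //   auto expected = current.load();
  //   return current.compare_exchange_strong(expected, std::move(next));
  // }
  // @endcode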
  template<typename _Tp>
    class atomic<weak_ptr<_Tp>>
    {
    public:
      using value_type = weak_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      atomic(weak_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }

      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;

      weak_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator weak_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }

      void
      store(weak_ptr<_Tp> __desired,
            memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(weak_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      weak_ptr<_Tp>
      exchange(weak_ptr<_Tp> __desired,
               memory_order __o = memory_order_seq_cst) noexcept
      {
        _M_impl.swap(__desired, __o);
        return __desired;
      }

      bool
      compare_exchange_strong(weak_ptr<_Tp>& __expected,
                              weak_ptr<_Tp> __desired,
                              memory_order __o, memory_order __o2) noexcept
      { return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2); }

      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
                              memory_order __o = memory_order_seq_cst) noexcept
      {
        memory_order __o2;
        switch (__o)
        {
        case memory_order_acq_rel:
          __o2 = memory_order_acquire;
          break;
        case memory_order_release:
          __o2 = memory_order_relaxed;
          break;
        default:
          __o2 = __o;
        }
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o, memory_order __o2) noexcept
      {
        return compare_exchange_strong(__expected, std::move(__desired),
                                       __o, __o2);
      }

      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
                            memory_order __o = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }

#if __cpp_lib_atomic_wait
      void
      wait(value_type __old,
           memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      { _M_impl.notify_one(); }

      void
      notify_all() noexcept
      { _M_impl.notify_all(); }
#endif

    private:
      _Sp_atomic<weak_ptr<_Tp>> _M_impl;
    };
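  // Usage sketch for atomic<weak_ptr>; `Session` and the names below are
  // hypothetical. A weak_ptr stored atomically is useful as a shared,
  // non-owning cache that threads can race to read and refresh:
  //
  // @code
  // std::atomic<std::weak_ptr<Session>> last_session;
  //
  // void remember(const std::shared_ptr<Session>& s)
  // { last_session.store(s); }             // converts to weak_ptr
  //
  // std::shared_ptr<Session> recall()
  // { return last_session.load().lock(); } // empty if already expired
  // @endcode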
#endif // C++20

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _SHARED_PTR_ATOMIC_H