#ifndef _SHARED_PTR_ATOMIC_H
#define _SHARED_PTR_ATOMIC_H 1

#include <bits/atomic_base.h> // memory_order, __atomic_ref
// Annotations for the custom locking in _Sp_atomic, so that
// ThreadSanitizer models the lock bit as a mutex.
#if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
#include <sanitizer/tsan_interface.h>
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
  __tsan_mutex_destroy(X, __tsan_mutex_not_static)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
  __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
    __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
  __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
#else // no TSAN: the annotations expand to nothing
#define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
#define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
#endif
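// Illustrative sketch (not part of this header): the annotations are meant
// to bracket a home-made try-lock so the sanitizer sees a mutex.  A
// CAS-based acquire would be annotated roughly like this:
//
//   _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&word);           // about to attempt lock
//   if (cas_succeeded)
//     _GLIBCXX_TSAN_MUTEX_LOCKED(&word);           // attempt succeeded
//   else
//     _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&word);  // attempt failed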
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// @cond undocumented

  // Lock guard used by the deprecated atomic free functions below.
  // With threads enabled it selects one or two mutexes from a small
  // fixed pool, keyed by the address(es); without threads it is a no-op.
  struct _Sp_locker
  {
    _Sp_locker(const _Sp_locker&) = delete;
    _Sp_locker& operator=(const _Sp_locker&) = delete;

#ifdef __GTHREADS
    explicit
    _Sp_locker(const void*) noexcept;
    _Sp_locker(const void*, const void*) noexcept;
    ~_Sp_locker();

  private:
    unsigned char _M_key1;
    unsigned char _M_key2;
#else
    explicit _Sp_locker(const void*, const void* = nullptr) { }
#endif
  };

  /// @endcond
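  // Illustrative sketch (an assumption, mirroring the out-of-line
  // definitions in the library): each address hashes to one of a small
  // pool of mutexes, so unrelated shared_ptr objects usually take
  // distinct locks, and the two-pointer constructor locks both keys so
  // the compare-exchange functions can inspect *__p and *__v together:
  //
  //   unsigned char __key = std::hash<const void*>()(__addr) & 0xf;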
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>*)
    {
#ifdef __GTHREADS
      // Only lock-free when the program is effectively single-threaded,
      // because then the _Sp_locker mutexes are never actually taken.
      return __gthread_active_p() == 0;
#else
      return true;
#endif
    }
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_is_lock_free(const shared_ptr<_Tp>* __p)
    { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
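  // Usage sketch (illustrative only): the result depends on the program's
  // threading mode, not on the individual object.
  //
  //   std::shared_ptr<int> gp;
  //   bool lf = std::atomic_is_lock_free(&gp); // true only if single-threaded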
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_load(const shared_ptr<_Tp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
    {
      _Sp_locker __lock{__p};
      return *__p;
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
    { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
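  // Usage sketch (illustrative only; Config and use() are hypothetical):
  // taking a consistent snapshot of a shared global from a reader thread.
  //
  //   std::shared_ptr<Config> g_config;               // written by one thread
  //   std::shared_ptr<Config> snap = std::atomic_load(&g_config);
  //   if (snap) use(*snap);                           // snapshot owns the object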
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p is not destroyed while lock held
    }
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
			  __shared_ptr<_Tp, _Lp> __r,
			  memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r); // use swap so that **__p is not destroyed while lock held
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline void
    atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
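  // Usage sketch (illustrative only): publishing a replacement value.  The
  // swap above means the old object is destroyed after the internal lock
  // is released, when the by-value parameter goes out of scope.
  //
  //   std::atomic_store(&g_config, std::make_shared<Config>());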
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline shared_ptr<_Tp>
    atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
			     __shared_ptr<_Tp, _Lp> __r,
			     memory_order)
    {
      _Sp_locker __lock{__p};
      __p->swap(__r);
      return __r;
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline __shared_ptr<_Tp, _Lp>
    atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
    {
      return std::atomic_exchange_explicit(__p, std::move(__r),
					   memory_order_seq_cst);
    }
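  // Usage sketch (illustrative only): atomically take ownership of the
  // current value while installing a replacement.
  //
  //   std::shared_ptr<Config> old = std::atomic_exchange(&g_config, fresh);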
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    bool
    atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
	shared_ptr<_Tp>* __v, shared_ptr<_Tp> __w, memory_order, memory_order)
    {
      shared_ptr<_Tp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<shared_ptr<_Tp>> __less;
      // Equal only if they compare equal and neither owns before the other.
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				   shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
	shared_ptr<_Tp>* __v, shared_ptr<_Tp> __w,
	memory_order __success, memory_order __failure)
    {
      // The mutex-based implementation never fails spuriously,
      // so weak simply forwards to strong.
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }
  template<typename _Tp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
				 shared_ptr<_Tp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    bool
    atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
					    __shared_ptr<_Tp, _Lp>* __v,
					    __shared_ptr<_Tp, _Lp> __w,
					    memory_order, memory_order)
    {
      __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
      _Sp_locker __lock{__p, __v};
      owner_less<__shared_ptr<_Tp, _Lp>> __less;
      if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
	{
	  __x = std::move(*__p);
	  *__p = std::move(__w);
	  return true;
	}
      __x = std::move(*__v);
      *__v = *__p;
      return false;
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
				   __shared_ptr<_Tp, _Lp>* __v,
				   __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
					  __shared_ptr<_Tp, _Lp>* __v,
					  __shared_ptr<_Tp, _Lp> __w,
					  memory_order __success,
					  memory_order __failure)
    {
      // Weak forwards to strong: the locked implementation cannot fail
      // spuriously.
      return std::atomic_compare_exchange_strong_explicit(__p, __v,
	  std::move(__w), __success, __failure);
    }
  template<typename _Tp, _Lock_policy _Lp>
    _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
    inline bool
    atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
				 __shared_ptr<_Tp, _Lp>* __v,
				 __shared_ptr<_Tp, _Lp> __w)
    {
      return std::atomic_compare_exchange_weak_explicit(__p, __v,
	  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
    }
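  // Usage sketch (illustrative only; Node is a hypothetical type): the
  // classic CAS update loop, here prepending to a shared list.
  //
  //   std::shared_ptr<Node> head;  // shared between threads
  //   auto node = std::make_shared<Node>();
  //   node->next = std::atomic_load(&head);
  //   while (!std::atomic_compare_exchange_weak(&head, &node->next, node))
  //     { /* node->next was refreshed to the current head; just retry */ }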
#ifdef __glibcxx_atomic_shared_ptr
  template<typename _Tp>
    struct atomic;

  template<typename _Up>
    static constexpr bool __is_shared_ptr = false;
  template<typename _Up>
    static constexpr bool __is_shared_ptr<shared_ptr<_Up>> = true;

  // Implementation of the atomic<shared_ptr<T>> and atomic<weak_ptr<T>>
  // specializations below.  _Tp is shared_ptr<T> or weak_ptr<T>.
  template<typename _Tp>
    class _Sp_atomic
    {
      using value_type = _Tp;
      using element_type = typename _Tp::element_type;

      friend struct atomic<_Tp>;

      // An atomic version of __shared_count<> and __weak_count<>.
      // Stores a _Sp_counted_base* but uses the LSB as a lock.
      struct _Atomic_count
      {
	// __shared_count<> or __weak_count<>, depending on _Tp.
	using __count_type = decltype(_Tp::_M_refcount);
	using uintptr_t = __UINTPTR_TYPE__;

	// _Sp_counted_base<>*
	using pointer = decltype(__count_type::_M_pi);

	// Ensure we can use the LSB of the pointer as the lock bit.
	static_assert(alignof(remove_pointer_t<pointer>) > 1);
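	// Illustrative note: because control blocks are aligned to at
	// least 2 bytes (the static_assert above), the low bit of a valid
	// _Sp_counted_base* is always zero, so it can double as a spinlock:
	//
	//   uintptr_t __raw  = _M_val & ~_S_lock_bit; // the real pointer
	//   bool      __held = _M_val & _S_lock_bit;  // the lock state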
	constexpr _Atomic_count() noexcept = default;

	_Atomic_count(__count_type&& __c) noexcept
	: _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
	{ __c._M_pi = nullptr; }

	~_Atomic_count()
	{
	  auto __val = _AtomicRef(_M_val).load(memory_order_relaxed);
	  _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
	  __glibcxx_assert(!(__val & _S_lock_bit));
	  if (auto __pi = reinterpret_cast<pointer>(__val))
	    {
	      if constexpr (__is_shared_ptr<_Tp>)
		__pi->_M_release();
	      else
		__pi->_M_weak_release();
	    }
	}
	_Atomic_count(const _Atomic_count&) = delete;
	_Atomic_count& operator=(const _Atomic_count&) = delete;
	// Precondition: caller does not hold the lock!
	// Returns the raw pointer value (without the lock bit set).
	pointer
	lock(memory_order __o) const noexcept
	{
	  // To acquire the lock we flip the LSB from 0 to 1.
	  _AtomicRef __aref(_M_val);
	  auto __current = __aref.load(memory_order_relaxed);
	  while (__current & _S_lock_bit)
	    {
	      // Spin without writing until the lock bit is clear.
#if __glibcxx_atomic_wait
	      __detail::__thread_relax();
#endif
	      __current = __aref.load(memory_order_relaxed);
	    }

	  _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);

	  while (!__aref.compare_exchange_strong(__current,
						 __current | _S_lock_bit,
						 __o,
						 memory_order_relaxed))
	    {
	      _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
#if __glibcxx_atomic_wait
	      __detail::__thread_relax();
#endif
	      __current = __current & ~_S_lock_bit;
	      _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
	    }
	  _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
	  return reinterpret_cast<pointer>(__current);
	}
	// Precondition: caller holds the lock!
	void
	unlock(memory_order __o) const noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  // Clearing the lock bit is equivalent to subtracting 1.
	  _AtomicRef(_M_val).fetch_sub(1, __o);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	}
	// Swaps the values of *this and __c, and unlocks *this.
	// Precondition: caller holds the lock!
	void
	_M_swap_unlock(__count_type& __c, memory_order __o) noexcept
	{
	  if (__o != memory_order_seq_cst)
	    __o = memory_order_release;
	  auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  // The incoming value has the lock bit clear, so the exchange
	  // both stores the new pointer and releases the lock.
	  __x = _AtomicRef(_M_val).exchange(__x, __o);
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
	  __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
	}
#if __glibcxx_atomic_wait
	// Unlocks *this, then blocks until the stored value changes.
	// Precondition: caller holds the lock!
	void
	_M_wait_unlock(const element_type* const& __ptr,
		       memory_order __o) const noexcept
	{
	  auto __old_ptr = __ptr;
	  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
	  auto __old_pi
	    = _AtomicRef(_M_val).fetch_sub(1, memory_order_relaxed) - 1u;
	  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);

	  auto __lo = __o;
	  if (__o != memory_order_seq_cst)
	    __lo = memory_order_acquire;

	  std::__atomic_wait_address(&_M_val,
	    [=, &__ptr, this](uintptr_t __new_pi)
	    {
	      // Done waiting if the control block pointer changed ...
	      if (__old_pi != (__new_pi & ~_S_lock_bit))
		return true;
	      // ... or if the stored element pointer changed, which can
	      // only be checked while holding the lock.
	      __new_pi = reinterpret_cast<uintptr_t>(this->lock(__lo));
	      auto __new_ptr = __ptr;
	      this->unlock(memory_order_relaxed);
	      return __new_pi != __old_pi || __new_ptr != __old_ptr;
	    },
	    [__o, this] { return _AtomicRef(_M_val).load(__o); });
	}
	void
	notify_one() noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
	  _AtomicRef(_M_val).notify_one();
	  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
	}

	void
	notify_all() noexcept
	{
	  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
	  _AtomicRef(_M_val).notify_all();
	  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
	}
#endif // __glibcxx_atomic_wait
	using _AtomicRef = __atomic_ref<uintptr_t>;
	alignas(_AtomicRef::required_alignment) mutable uintptr_t _M_val{0};
	static constexpr uintptr_t _S_lock_bit{1};
      };

      element_type* _M_ptr = nullptr;
      _Atomic_count _M_refcount;
      static typename _Atomic_count::pointer
      _S_add_ref(typename _Atomic_count::pointer __p)
      {
	if (__p)
	  {
	    if constexpr (__is_shared_ptr<_Tp>)
	      __p->_M_add_ref_copy();
	    else
	      __p->_M_weak_add_ref();
	  }
	return __p;
      }
    public:
      constexpr _Sp_atomic() noexcept = default;

      explicit
      _Sp_atomic(value_type __r) noexcept
      : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
      { }

      ~_Sp_atomic() = default;

      _Sp_atomic(const _Sp_atomic&) = delete;
      void operator=(const _Sp_atomic&) = delete;
      value_type
      load(memory_order __o) const noexcept
      {
	__glibcxx_assert(__o != memory_order_release
			   && __o != memory_order_acq_rel);
	// Ensure that the correct value of _M_ptr is visible after locking,
	// by upgrading relaxed or consume to acquire.
	if (__o != memory_order_seq_cst)
	  __o = memory_order_acquire;

	value_type __ret;
	auto __pi = _M_refcount.lock(__o);
	__ret._M_ptr = _M_ptr;
	__ret._M_refcount._M_pi = _S_add_ref(__pi);
	_M_refcount.unlock(memory_order_relaxed);
	return __ret;
      }
      void
      swap(value_type& __r, memory_order __o) noexcept
      {
	_M_refcount.lock(memory_order_acquire);
	std::swap(_M_ptr, __r._M_ptr);
	_M_refcount._M_swap_unlock(__r._M_refcount, __o);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	bool __result = true;
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __expected._M_ptr
	      && __pi == __expected._M_refcount._M_pi)
	  {
	    _M_ptr = __desired._M_ptr;
	    _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
	  }
	else
	  {
	    // Operands match: give up the old __expected after unlocking.
	    _Tp __sink = std::move(__expected);
	    __expected._M_ptr = _M_ptr;
	    __expected._M_refcount._M_pi = _S_add_ref(__pi);
	    _M_refcount.unlock(__o2);
	    __result = false;
	  }
	return __result;
      }
#if __glibcxx_atomic_wait
      void
      wait(value_type __old, memory_order __o) const noexcept
      {
	auto __pi = _M_refcount.lock(memory_order_acquire);
	if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
	  _M_refcount._M_wait_unlock(_M_ptr, __o);
	else
	  _M_refcount.unlock(memory_order_relaxed);
      }
      void
      notify_one() noexcept
      {
	_M_refcount.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_refcount.notify_all();
      }
#endif // __glibcxx_atomic_wait
    };
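  // Illustrative summary (not part of the header): _Sp_atomic splits a
  // shared_ptr into its two words.  _M_ptr is only read or written while
  // _M_refcount's lock bit is held, so the pair behaves like one atomic
  // object without any external mutex:
  //
  //   auto __pi = _M_refcount.lock(memory_order_acquire); // lock + read
  //   /* ... read or write _M_ptr here ... */
  //   _M_refcount.unlock(memory_order_release);           // publish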
  template<typename _Tp>
    struct atomic<shared_ptr<_Tp>>
    {
    public:
      using value_type = shared_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3661. constinit atomic<shared_ptr<T>> a(nullptr); should work
      constexpr atomic(nullptr_t) noexcept : atomic() { }

      atomic(shared_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }
      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;
      shared_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator shared_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }
      void
      store(shared_ptr<_Tp> __desired,
	    memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(shared_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3893. LWG 3661 broke atomic<shared_ptr<T>> a; a = nullptr;
      void
      operator=(nullptr_t) noexcept
      { store(nullptr); }
      shared_ptr<_Tp>
      exchange(shared_ptr<_Tp> __desired,
	       memory_order __o = memory_order_seq_cst) noexcept
      {
	_M_impl.swap(__desired, __o);
	return __desired;
      }
      bool
      compare_exchange_strong(shared_ptr<_Tp>& __expected,
			      shared_ptr<_Tp> __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	return _M_impl.compare_exchange_strong(__expected, __desired,
					       __o, __o2);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o = memory_order_seq_cst) noexcept
      {
	// Derive the failure order from the success order.
	memory_order __o2;
	switch (__o)
	{
	case memory_order_acq_rel:
	  __o2 = memory_order_acquire;
	  break;
	case memory_order_release:
	  __o2 = memory_order_relaxed;
	  break;
	default:
	  __o2 = __o;
	}
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o, memory_order __o2) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }
#if __glibcxx_atomic_wait
      void
      wait(value_type __old,
	   memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      {
	_M_impl.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_impl.notify_all();
      }
#endif
    private:
      _Sp_atomic<shared_ptr<_Tp>> _M_impl;
    };
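  // Usage sketch (illustrative only): the C++20 replacement for the
  // deprecated free functions above.
  //
  //   std::atomic<std::shared_ptr<int>> box(std::make_shared<int>(1));
  //   std::shared_ptr<int> snap = box.load();   // take a snapshot
  //   box.store(std::make_shared<int>(2));      // publish a new value
  //   auto expected = box.load();
  //   box.compare_exchange_strong(expected, nullptr);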
  template<typename _Tp>
    struct atomic<weak_ptr<_Tp>>
    {
    public:
      using value_type = weak_ptr<_Tp>;

      static constexpr bool is_always_lock_free = false;

      bool
      is_lock_free() const noexcept
      { return false; }

      constexpr atomic() noexcept = default;

      atomic(weak_ptr<_Tp> __r) noexcept
      : _M_impl(std::move(__r))
      { }
      atomic(const atomic&) = delete;
      void operator=(const atomic&) = delete;
      weak_ptr<_Tp>
      load(memory_order __o = memory_order_seq_cst) const noexcept
      { return _M_impl.load(__o); }

      operator weak_ptr<_Tp>() const noexcept
      { return _M_impl.load(memory_order_seq_cst); }
      void
      store(weak_ptr<_Tp> __desired,
	    memory_order __o = memory_order_seq_cst) noexcept
      { _M_impl.swap(__desired, __o); }

      void
      operator=(weak_ptr<_Tp> __desired) noexcept
      { _M_impl.swap(__desired, memory_order_seq_cst); }
      weak_ptr<_Tp>
      exchange(weak_ptr<_Tp> __desired,
	       memory_order __o = memory_order_seq_cst) noexcept
      {
	_M_impl.swap(__desired, __o);
	return __desired;
      }
      bool
      compare_exchange_strong(weak_ptr<_Tp>& __expected,
			      weak_ptr<_Tp> __desired,
			      memory_order __o, memory_order __o2) noexcept
      {
	return _M_impl.compare_exchange_strong(__expected, __desired,
					       __o, __o2);
      }
      bool
      compare_exchange_strong(value_type& __expected, value_type __desired,
			      memory_order __o = memory_order_seq_cst) noexcept
      {
	// Derive the failure order from the success order.
	memory_order __o2;
	switch (__o)
	{
	case memory_order_acq_rel:
	  __o2 = memory_order_acquire;
	  break;
	case memory_order_release:
	  __o2 = memory_order_relaxed;
	  break;
	default:
	  __o2 = __o;
	}
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o, memory_order __o2) noexcept
      {
	return compare_exchange_strong(__expected, std::move(__desired),
				       __o, __o2);
      }
      bool
      compare_exchange_weak(value_type& __expected, value_type __desired,
			    memory_order __o = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__expected, std::move(__desired), __o); }
#if __glibcxx_atomic_wait
      void
      wait(value_type __old,
	   memory_order __o = memory_order_seq_cst) const noexcept
      { _M_impl.wait(std::move(__old), __o); }

      void
      notify_one() noexcept
      {
	_M_impl.notify_one();
      }

      void
      notify_all() noexcept
      {
	_M_impl.notify_all();
      }
#endif
    private:
      _Sp_atomic<weak_ptr<_Tp>> _M_impl;
    };
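  // Usage sketch (illustrative only; Config and use() are hypothetical):
  // a weak cache slot that readers promote to a shared_ptr.
  //
  //   std::atomic<std::weak_ptr<Config>> slot;
  //   slot.store(std::weak_ptr<Config>(g_config));
  //   if (auto sp = slot.load().lock())   // promote snapshot if still alive
  //     use(*sp);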
#endif // __glibcxx_atomic_shared_ptr

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _SHARED_PTR_ATOMIC_H