Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2026-05-03 08:13:23

// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
0016 
0017 #ifndef _LIBCPP___CXX03___ATOMIC_ATOMIC_REF_H
0018 #define _LIBCPP___CXX03___ATOMIC_ATOMIC_REF_H
0019 
0020 #include <__cxx03/__assert>
0021 #include <__cxx03/__atomic/atomic_sync.h>
0022 #include <__cxx03/__atomic/check_memory_order.h>
0023 #include <__cxx03/__atomic/to_gcc_order.h>
0024 #include <__cxx03/__concepts/arithmetic.h>
0025 #include <__cxx03/__concepts/same_as.h>
0026 #include <__cxx03/__config>
0027 #include <__cxx03/__memory/addressof.h>
0028 #include <__cxx03/__type_traits/has_unique_object_representation.h>
0029 #include <__cxx03/__type_traits/is_trivially_copyable.h>
0030 #include <__cxx03/cstddef>
0031 #include <__cxx03/cstdint>
0032 #include <__cxx03/cstring>
0033 
0034 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
0035 #  pragma GCC system_header
0036 #endif
0037 
0038 _LIBCPP_PUSH_MACROS
0039 #include <__cxx03/__undef_macros>
0040 
0041 _LIBCPP_BEGIN_NAMESPACE_STD
0042 
0043 #if _LIBCPP_STD_VER >= 20
0044 
0045 // These types are required to make __atomic_is_always_lock_free work across GCC and Clang.
0046 // The purpose of this trick is to make sure that we provide an object with the correct alignment
0047 // to __atomic_is_always_lock_free, since that answer depends on the alignment.
template <size_t _Alignment>
struct __alignment_checker_type {
  // A single byte whose address is guaranteed to be aligned to _Alignment.
  alignas(_Alignment) char __data;
};
0052 
template <size_t _Alignment>
struct __get_aligner_instance {
  // Constant-initialized object whose address (aligned to _Alignment) is passed
  // to __atomic_always_lock_free below, since that builtin's answer depends on
  // the alignment of the pointer it is given.
  static constexpr __alignment_checker_type<_Alignment> __instance{};
};
0057 
// Common base class of every atomic_ref specialization. It stores the address
// of the referenced object and implements the operations shared by all value
// categories (load/store/exchange/compare-exchange/wait/notify) on top of the
// GCC/Clang __atomic_* builtins.
template <class _Tp>
struct __atomic_ref_base {
private:
  // Zero the padding bits of __val (when __builtin_clear_padding is available)
  // so padding never influences the byte-wise comparisons performed by the
  // __atomic_* builtins. Returns the address of __val either way.
  _LIBCPP_HIDE_FROM_ABI static _Tp* __clear_padding(_Tp& __val) noexcept {
    _Tp* __ptr = std::addressof(__val);
#  if __has_builtin(__builtin_clear_padding)
    __builtin_clear_padding(__ptr);
#  endif
    return __ptr;
  }

  // Compare-exchange that is also correct for types containing padding bits.
  // For types with unique object representations (or floating-point types, or
  // when the clear-padding builtin is unavailable) it forwards directly to the
  // builtin; otherwise it must loop, see the comment inside.
  _LIBCPP_HIDE_FROM_ABI static bool __compare_exchange(
      _Tp* __ptr, _Tp* __expected, _Tp* __desired, bool __is_weak, int __success, int __failure) noexcept {
    if constexpr (
#  if __has_builtin(__builtin_clear_padding)
        has_unique_object_representations_v<_Tp> || floating_point<_Tp>
#  else
        true // NOLINT(readability-simplify-boolean-expr)
#  endif
    ) {
      return __atomic_compare_exchange(__ptr, __expected, __desired, __is_weak, __success, __failure);
    } else { // _Tp has padding bits and __builtin_clear_padding is available
      __clear_padding(*__desired);
      _Tp __copy = *__expected;
      __clear_padding(__copy);
      // The algorithm we use here is basically to perform `__atomic_compare_exchange` on the
      // values until it has either succeeded, or failed because the value representation of the
      // objects involved was different. This is why we loop around __atomic_compare_exchange:
      // we basically loop until its failure is caused by the value representation of the objects
      // being different, not only their object representation.
      while (true) {
        _Tp __prev = __copy;
        if (__atomic_compare_exchange(__ptr, std::addressof(__copy), __desired, __is_weak, __success, __failure)) {
          return true;
        }
        _Tp __curr = __copy;
        if (std::memcmp(__clear_padding(__prev), __clear_padding(__curr), sizeof(_Tp)) != 0) {
          // Value representation without padding bits do not compare equal ->
          // write the current content of *ptr into *expected
          std::memcpy(__expected, std::addressof(__copy), sizeof(_Tp));
          return false;
        }
      }
    }
  }

  // The wait/notify glue below needs access to __ptr_ and load().
  friend struct __atomic_waitable_traits<__atomic_ref_base<_Tp>>;

  // require types that are 1, 2, 4, 8, or 16 bytes in length to be aligned to at least their size to be potentially
  // used lock-free
  static constexpr size_t __min_alignment = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || (sizeof(_Tp) > 16) ? 0 : sizeof(_Tp);

public:
  using value_type = _Tp;

  // Alignment the referenced object must have; the natural alignment of _Tp,
  // raised to sizeof(_Tp) for the power-of-two sizes covered by __min_alignment.
  static constexpr size_t required_alignment = alignof(_Tp) > __min_alignment ? alignof(_Tp) : __min_alignment;

  // The __atomic_always_lock_free builtin takes into account the alignment of the pointer if provided,
  // so we create a fake pointer with a suitable alignment when querying it. Note that we are guaranteed
  // that the pointer is going to be aligned properly at runtime because that is a (checked) precondition
  // of atomic_ref's constructor.
  static constexpr bool is_always_lock_free =
      __atomic_always_lock_free(sizeof(_Tp), &__get_aligner_instance<required_alignment>::__instance);

  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __atomic_is_lock_free(sizeof(_Tp), __ptr_); }

  // Atomically stores __desired. __order must be relaxed, release or seq_cst
  // (checked both at compile time via the macro and at runtime below).
  _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic store operation is invalid");
    __atomic_store(__ptr_, __clear_padding(__desired), std::__to_gcc_order(__order));
  }

  // Assignment through the reference is a seq_cst store; returns __desired.
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept {
    store(__desired);
    return __desired;
  }

  // Atomically loads the referenced value. The result is read into aligned raw
  // storage so _Tp is not required to be default-constructible.
  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic load operation is invalid");
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_load(__ptr_, __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  _LIBCPP_HIDE_FROM_ABI operator _Tp() const noexcept { return load(); }

  // Atomically replaces the referenced value with __desired; returns the prior value.
  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_exchange(__ptr_, __clear_padding(__desired), __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  // Weak CAS with separate success/failure orders; may fail spuriously.
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to weak atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }
  // Strong CAS with separate success/failure orders; no spurious failures.
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to strong atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }

  // Single-order CAS overloads: the failure order is derived from __order.
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }

  // Blocks until the referenced value observably differs from __old
  // (delegates to the shared libc++ atomic-wait machinery).
  _LIBCPP_HIDE_FROM_ABI void wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_WAIT_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic wait operation is invalid");
    std::__atomic_wait(*this, __old, __order);
  }
  _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept { std::__atomic_notify_one(*this); }
  _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept { std::__atomic_notify_all(*this); }

protected:
  // Annotate the stored pointer with required_alignment: the constructor of the
  // derived classes checks this precondition, so the compiler may rely on it.
  typedef _Tp _Aligned_Tp __attribute__((aligned(required_alignment)));
  _Aligned_Tp* __ptr_;

  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __ptr_(std::addressof(__obj)) {}
};
0227 
0228 template <class _Tp>
0229 struct __atomic_waitable_traits<__atomic_ref_base<_Tp>> {
0230   static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_ref_base<_Tp>& __a, memory_order __order) {
0231     return __a.load(__order);
0232   }
0233   static _LIBCPP_HIDE_FROM_ABI const _Tp* __atomic_contention_address(const __atomic_ref_base<_Tp>& __a) {
0234     return __a.__ptr_;
0235   }
0236 };
0237 
// Primary template: atomic_ref over any trivially copyable type. Only the
// generic operations inherited from __atomic_ref_base are available; the
// arithmetic operations live in the integral/floating-point/pointer
// specializations below.
template <class _Tp>
struct atomic_ref : public __atomic_ref_base<_Tp> {
  static_assert(is_trivially_copyable_v<_Tp>, "std::atomic_ref<T> requires that 'T' be a trivially copyable type");

  using __base = __atomic_ref_base<_Tp>;

  // Precondition: __obj must be aligned to required_alignment (checked here
  // when libc++ assertions are enabled).
  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  // Copying an atomic_ref makes both copies refer to the same object.
  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  // Assignment through the reference stores into the referenced object.
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  // An atomic_ref cannot be reseated; only the referenced value is assignable.
  atomic_ref& operator=(const atomic_ref&) = delete;
};
0256 
// Specialization for integral types other than bool: adds the arithmetic and
// bitwise fetch operations, which map directly onto the GCC/Clang atomic
// read-modify-write builtins.
template <class _Tp>
  requires(std::integral<_Tp> && !std::same_as<bool, _Tp>)
struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
  using __base = __atomic_ref_base<_Tp>;

  using difference_type = __base::value_type;

  // Precondition: __obj must be aligned to required_alignment.
  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
        "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  // Atomic read-modify-write operations; each returns the previously held value.
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_add(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_sub(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_and(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_or(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_xor(this->__ptr_, __arg, std::__to_gcc_order(__order));
  }

  // Post-increment/decrement return the old value; pre- and compound forms
  // return the new value, reconstructed from the fetch result.
  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) const noexcept { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() const noexcept { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() const noexcept { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __arg) const noexcept { return fetch_and(__arg) & __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __arg) const noexcept { return fetch_or(__arg) | __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __arg) const noexcept { return fetch_xor(__arg) ^ __arg; }
};
0302 
0303 template <class _Tp>
0304   requires std::floating_point<_Tp>
0305 struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
0306   using __base = __atomic_ref_base<_Tp>;
0307 
0308   using difference_type = __base::value_type;
0309 
0310   _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
0311     _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
0312         reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
0313         "atomic_ref ctor: referenced object must be aligned to required_alignment");
0314   }
0315 
0316   _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;
0317 
0318   _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }
0319 
0320   atomic_ref& operator=(const atomic_ref&) = delete;
0321 
0322   _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0323     _Tp __old = this->load(memory_order_relaxed);
0324     _Tp __new = __old + __arg;
0325     while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
0326       __new = __old + __arg;
0327     }
0328     return __old;
0329   }
0330   _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0331     _Tp __old = this->load(memory_order_relaxed);
0332     _Tp __new = __old - __arg;
0333     while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
0334       __new = __old - __arg;
0335     }
0336     return __old;
0337   }
0338 
0339   _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
0340   _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
0341 };
0342 
// Specialization for pointer types: fetch_add/fetch_sub advance by whole
// elements, so the offset handed to the builtin is scaled by sizeof(_Tp)
// (the GCC __atomic builtins do byte-wise arithmetic on pointer operands).
template <class _Tp>
struct atomic_ref<_Tp*> : public __atomic_ref_base<_Tp*> {
  using __base = __atomic_ref_base<_Tp*>;

  using difference_type = ptrdiff_t;

  // NOTE(review): unlike the other specializations, this constructor performs
  // no alignment assertion — presumably because a _Tp* object always satisfies
  // required_alignment on supported platforms; confirm against upstream.
  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp*& __ptr) : __base(__ptr) {}

  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;

  // Atomic pointer arithmetic; each returns the previously held pointer.
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_add(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __atomic_fetch_sub(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
  }

  // Post-forms return the old pointer; pre- and compound forms return the new
  // pointer, reconstructed from the fetch result.
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) const noexcept { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) const noexcept { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() const noexcept { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() const noexcept { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __arg) const noexcept { return fetch_sub(__arg) - __arg; }
};
0369 
0370 _LIBCPP_CTAD_SUPPORTED_FOR_TYPE(atomic_ref);
0371 
0372 #endif // _LIBCPP_STD_VER >= 20
0373 
0374 _LIBCPP_END_NAMESPACE_STD
0375 
0376 _LIBCPP_POP_MACROS
0377 
#endif // _LIBCPP___CXX03___ATOMIC_ATOMIC_REF_H