Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2026-05-03 08:13:13

0001 // -*- C++ -*-
0002 //===----------------------------------------------------------------------===//
0003 //
0004 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
0005 // See https://llvm.org/LICENSE.txt for license information.
0006 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
0007 //
0008 //                        Kokkos v. 4.0
0009 //       Copyright (2022) National Technology & Engineering
0010 //               Solutions of Sandia, LLC (NTESS).
0011 //
0012 // Under the terms of Contract DE-NA0003525 with NTESS,
0013 // the U.S. Government retains certain rights in this software.
0014 //
//===----------------------------------------------------------------------===//
0016 
0017 #ifndef _LIBCPP___ATOMIC_ATOMIC_REF_H
0018 #define _LIBCPP___ATOMIC_ATOMIC_REF_H
0019 
0020 #include <__assert>
0021 #include <__atomic/atomic_sync.h>
0022 #include <__atomic/check_memory_order.h>
0023 #include <__atomic/memory_order.h>
0024 #include <__atomic/to_gcc_order.h>
0025 #include <__concepts/arithmetic.h>
0026 #include <__concepts/same_as.h>
0027 #include <__config>
0028 #include <__cstddef/byte.h>
0029 #include <__cstddef/ptrdiff_t.h>
0030 #include <__memory/addressof.h>
0031 #include <__type_traits/has_unique_object_representation.h>
0032 #include <__type_traits/is_trivially_copyable.h>
0033 #include <cstdint>
0034 #include <cstring>
0035 
0036 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
0037 #  pragma GCC system_header
0038 #endif
0039 
0040 _LIBCPP_PUSH_MACROS
0041 #include <__undef_macros>
0042 
0043 _LIBCPP_BEGIN_NAMESPACE_STD
0044 
0045 #if _LIBCPP_STD_VER >= 20
0046 
0047 // These types are required to make __atomic_is_always_lock_free work across GCC and Clang.
0048 // The purpose of this trick is to make sure that we provide an object with the correct alignment
0049 // to __atomic_is_always_lock_free, since that answer depends on the alignment.
// A dummy type whose only purpose is to carry a requested alignment:
// an object of __alignment_checker_type<_Alignment> is aligned to exactly
// _Alignment, so its address can be passed to __atomic_always_lock_free
// to obtain an alignment-aware answer (see is_always_lock_free below).
template <size_t _Alignment>
struct __alignment_checker_type {
  alignas(_Alignment) char __data;
};
0054 
// Provides a constexpr instance of the aligned dummy object above; its
// address is a constant expression, which the constexpr evaluation of
// __atomic_always_lock_free in is_always_lock_free requires.
template <size_t _Alignment>
struct __get_aligner_instance {
  static constexpr __alignment_checker_type<_Alignment> __instance{};
};
0059 
// Common base of every atomic_ref specialization: holds the pointer to the
// referenced object and implements the operations shared by all value
// categories (load/store, exchange, compare-exchange, wait/notify).
template <class _Tp>
struct __atomic_ref_base {
private:
  // Zero out _Tp's padding bits (when the compiler offers
  // __builtin_clear_padding) and return the object's address. This gives
  // objects with equal value representations identical object
  // representations before they are handed to memcmp or the atomic builtins.
  _LIBCPP_HIDE_FROM_ABI static _Tp* __clear_padding(_Tp& __val) noexcept {
    _Tp* __ptr = std::addressof(__val);
#  if __has_builtin(__builtin_clear_padding)
    __builtin_clear_padding(__ptr);
#  endif
    return __ptr;
  }

  // Shared implementation of the weak and strong compare-exchange.
  // __success/__failure are already-converted GCC memory-order integers
  // (see std::__to_gcc_order at the call sites).
  _LIBCPP_HIDE_FROM_ABI static bool __compare_exchange(
      _Tp* __ptr, _Tp* __expected, _Tp* __desired, bool __is_weak, int __success, int __failure) noexcept {
    if constexpr (
#  if __has_builtin(__builtin_clear_padding)
        has_unique_object_representations_v<_Tp> || floating_point<_Tp>
#  else
        true // NOLINT(readability-simplify-boolean-expr)
#  endif
    ) {
      // Either _Tp has no padding bits (or is floating-point), or we have no
      // way to clear padding: the builtin alone implements the semantics.
      return __atomic_compare_exchange(__ptr, __expected, __desired, __is_weak, __success, __failure);
    } else { // _Tp has padding bits and __builtin_clear_padding is available
      __clear_padding(*__desired);
      _Tp __copy = *__expected;
      __clear_padding(__copy);
      // The algorithm we use here is basically to perform `__atomic_compare_exchange` on the
      // values until it has either succeeded, or failed because the value representation of the
      // objects involved was different. This is why we loop around __atomic_compare_exchange:
      // we basically loop until its failure is caused by the value representation of the objects
      // being different, not only their object representation.
      while (true) {
        _Tp __prev = __copy;
        if (__atomic_compare_exchange(__ptr, std::addressof(__copy), __desired, __is_weak, __success, __failure)) {
          return true;
        }
        _Tp __curr = __copy;
        if (std::memcmp(__clear_padding(__prev), __clear_padding(__curr), sizeof(_Tp)) != 0) {
          // Value representation without padding bits do not compare equal ->
          // write the current content of *ptr into *expected
          std::memcpy(__expected, std::addressof(__copy), sizeof(_Tp));
          return false;
        }
      }
    }
  }

  // The generic wait/notify machinery needs access to load() and __ptr_.
  friend struct __atomic_waitable_traits<__atomic_ref_base<_Tp>>;

  // require types that are 1, 2, 4, 8, or 16 bytes in length to be aligned to at least their size to be potentially
  // used lock-free
  // ((sizeof(_Tp) & (sizeof(_Tp) - 1)) is non-zero exactly when sizeof(_Tp)
  // is not a power of two, in which case no extra alignment is required.)
  static constexpr size_t __min_alignment = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || (sizeof(_Tp) > 16) ? 0 : sizeof(_Tp);

public:
  using value_type = _Tp;

  // Alignment the referenced object must have: the natural alignment of _Tp,
  // raised to sizeof(_Tp) for small power-of-two sizes (see __min_alignment).
  static constexpr size_t required_alignment = alignof(_Tp) > __min_alignment ? alignof(_Tp) : __min_alignment;

  // The __atomic_always_lock_free builtin takes into account the alignment of the pointer if provided,
  // so we create a fake pointer with a suitable alignment when querying it. Note that we are guaranteed
  // that the pointer is going to be aligned properly at runtime because that is a (checked) precondition
  // of atomic_ref's constructor.
  static constexpr bool is_always_lock_free =
      __atomic_always_lock_free(sizeof(_Tp), &__get_aligner_instance<required_alignment>::__instance);

  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __atomic_is_lock_free(sizeof(_Tp), __ptr_); }

  // Atomically stores __desired into the referenced object; __desired's
  // padding bits are cleared first so the stored representation is canonical.
  // Precondition (checked): __order is relaxed, release, or seq_cst.
  _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic store operation is invalid");
    __atomic_store(__ptr_, __clear_padding(__desired), std::__to_gcc_order(__order));
  }

  // Assignment performs a seq_cst store and returns the stored value.
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept {
    store(__desired);
    return __desired;
  }

  // Atomically loads the referenced object. The result is built in suitably
  // aligned raw storage because __atomic_load writes through an out-pointer.
  // Precondition (checked): __order is relaxed, consume, acquire, or seq_cst.
  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic load operation is invalid");
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_load(__ptr_, __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  _LIBCPP_HIDE_FROM_ABI operator _Tp() const noexcept { return load(); }

  // Atomically replaces the value with __desired (padding cleared) and
  // returns the previous value.
  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    alignas(_Tp) byte __mem[sizeof(_Tp)];
    auto* __ret = reinterpret_cast<_Tp*>(__mem);
    __atomic_exchange(__ptr_, __clear_padding(__desired), __ret, std::__to_gcc_order(__order));
    return *__ret;
  }

  // Weak compare-exchange with distinct success/failure orders; on failure,
  // __expected receives the current value (see __compare_exchange).
  // Precondition (checked): __failure is not release or acq_rel.
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to weak atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }
  // Strong variant: only differs from the weak one in the __is_weak flag
  // passed to __compare_exchange.
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "atomic_ref: failure memory order argument to strong atomic compare-and-exchange operation is invalid");
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__success),
        std::__to_gcc_order(__failure));
  }

  // Single-order overloads: the failure order is derived from __order via
  // std::__to_gcc_failure_order.
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        true,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __compare_exchange(
        __ptr_,
        std::addressof(__expected),
        std::addressof(__desired),
        false,
        std::__to_gcc_order(__order),
        std::__to_gcc_failure_order(__order));
  }

  // Blocks until the value observed differs from __old; delegates to the
  // generic std::__atomic_wait machinery (see __atomic_waitable_traits).
  // Precondition (checked): __order is relaxed, consume, acquire, or seq_cst.
  _LIBCPP_HIDE_FROM_ABI void wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_WAIT_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "atomic_ref: memory order argument to atomic wait operation is invalid");
    std::__atomic_wait(*this, __old, __order);
  }
  _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept { std::__atomic_notify_one(*this); }
  _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept { std::__atomic_notify_all(*this); }

protected:
  // _Tp annotated with required_alignment so the compiler may assume __ptr_
  // points to suitably aligned storage (a checked constructor precondition
  // of the derived atomic_ref types).
  using _Aligned_Tp [[__gnu__::__aligned__(required_alignment), __gnu__::__nodebug__]] = _Tp;
  _Aligned_Tp* __ptr_;

  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __ptr_(std::addressof(__obj)) {}
};
0229 
0230 template <class _Tp>
0231 struct __atomic_waitable_traits<__atomic_ref_base<_Tp>> {
0232   static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_ref_base<_Tp>& __a, memory_order __order) {
0233     return __a.load(__order);
0234   }
0235   static _LIBCPP_HIDE_FROM_ABI const _Tp* __atomic_contention_address(const __atomic_ref_base<_Tp>& __a) {
0236     return __a.__ptr_;
0237   }
0238 };
0239 
0240 template <class _Tp>
0241 struct atomic_ref : public __atomic_ref_base<_Tp> {
0242   static_assert(is_trivially_copyable_v<_Tp>, "std::atomic_ref<T> requires that 'T' be a trivially copyable type");
0243 
0244   using __base _LIBCPP_NODEBUG = __atomic_ref_base<_Tp>;
0245 
0246   _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
0247     _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
0248         reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
0249         "atomic_ref ctor: referenced object must be aligned to required_alignment");
0250   }
0251 
0252   _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;
0253 
0254   _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }
0255 
0256   atomic_ref& operator=(const atomic_ref&) = delete;
0257 };
0258 
0259 template <class _Tp>
0260   requires(std::integral<_Tp> && !std::same_as<bool, _Tp>)
0261 struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
0262   using __base _LIBCPP_NODEBUG = __atomic_ref_base<_Tp>;
0263 
0264   using difference_type = __base::value_type;
0265 
0266   _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
0267     _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
0268         reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
0269         "atomic_ref ctor: referenced object must be aligned to required_alignment");
0270   }
0271 
0272   _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;
0273 
0274   _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }
0275 
0276   atomic_ref& operator=(const atomic_ref&) = delete;
0277 
0278   _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0279     return __atomic_fetch_add(this->__ptr_, __arg, std::__to_gcc_order(__order));
0280   }
0281   _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0282     return __atomic_fetch_sub(this->__ptr_, __arg, std::__to_gcc_order(__order));
0283   }
0284   _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0285     return __atomic_fetch_and(this->__ptr_, __arg, std::__to_gcc_order(__order));
0286   }
0287   _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0288     return __atomic_fetch_or(this->__ptr_, __arg, std::__to_gcc_order(__order));
0289   }
0290   _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0291     return __atomic_fetch_xor(this->__ptr_, __arg, std::__to_gcc_order(__order));
0292   }
0293 
0294   _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
0295   _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) const noexcept { return fetch_sub(_Tp(1)); }
0296   _LIBCPP_HIDE_FROM_ABI _Tp operator++() const noexcept { return fetch_add(_Tp(1)) + _Tp(1); }
0297   _LIBCPP_HIDE_FROM_ABI _Tp operator--() const noexcept { return fetch_sub(_Tp(1)) - _Tp(1); }
0298   _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
0299   _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
0300   _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __arg) const noexcept { return fetch_and(__arg) & __arg; }
0301   _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __arg) const noexcept { return fetch_or(__arg) | __arg; }
0302   _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __arg) const noexcept { return fetch_xor(__arg) ^ __arg; }
0303 };
0304 
0305 template <class _Tp>
0306   requires std::floating_point<_Tp>
0307 struct atomic_ref<_Tp> : public __atomic_ref_base<_Tp> {
0308   using __base _LIBCPP_NODEBUG = __atomic_ref_base<_Tp>;
0309 
0310   using difference_type = __base::value_type;
0311 
0312   _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
0313     _LIBCPP_ASSERT_ARGUMENT_WITHIN_DOMAIN(
0314         reinterpret_cast<uintptr_t>(std::addressof(__obj)) % __base::required_alignment == 0,
0315         "atomic_ref ctor: referenced object must be aligned to required_alignment");
0316   }
0317 
0318   _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;
0319 
0320   _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }
0321 
0322   atomic_ref& operator=(const atomic_ref&) = delete;
0323 
0324   _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0325     _Tp __old = this->load(memory_order_relaxed);
0326     _Tp __new = __old + __arg;
0327     while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
0328       __new = __old + __arg;
0329     }
0330     return __old;
0331   }
0332   _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0333     _Tp __old = this->load(memory_order_relaxed);
0334     _Tp __new = __old - __arg;
0335     while (!this->compare_exchange_weak(__old, __new, __order, memory_order_relaxed)) {
0336       __new = __old - __arg;
0337     }
0338     return __old;
0339   }
0340 
0341   _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
0342   _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
0343 };
0344 
0345 template <class _Tp>
0346 struct atomic_ref<_Tp*> : public __atomic_ref_base<_Tp*> {
0347   using __base _LIBCPP_NODEBUG = __atomic_ref_base<_Tp*>;
0348 
0349   using difference_type = ptrdiff_t;
0350 
0351   _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp*& __ptr) : __base(__ptr) {}
0352 
0353   _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __desired) const noexcept { return __base::operator=(__desired); }
0354 
0355   atomic_ref& operator=(const atomic_ref&) = delete;
0356 
0357   _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0358     return __atomic_fetch_add(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
0359   }
0360   _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
0361     return __atomic_fetch_sub(this->__ptr_, __arg * sizeof(_Tp), std::__to_gcc_order(__order));
0362   }
0363 
0364   _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) const noexcept { return fetch_add(1); }
0365   _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) const noexcept { return fetch_sub(1); }
0366   _LIBCPP_HIDE_FROM_ABI _Tp* operator++() const noexcept { return fetch_add(1) + 1; }
0367   _LIBCPP_HIDE_FROM_ABI _Tp* operator--() const noexcept { return fetch_sub(1) - 1; }
0368   _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __arg) const noexcept { return fetch_add(__arg) + __arg; }
0369   _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __arg) const noexcept { return fetch_sub(__arg) - __arg; }
0370 };
0371 
// NOTE(review): libc++ bookkeeping macro recording that class template
// argument deduction is intentionally supported for atomic_ref; the macro's
// definition is not visible in this header.
_LIBCPP_CTAD_SUPPORTED_FOR_TYPE(atomic_ref);
0373 
0374 #endif // _LIBCPP_STD_VER >= 20
0375 
0376 _LIBCPP_END_NAMESPACE_STD
0377 
0378 _LIBCPP_POP_MACROS
0379 
#endif // _LIBCPP___ATOMIC_ATOMIC_REF_H