|
|
|||
File indexing completed on 2026-05-03 08:13:13
0001 //===----------------------------------------------------------------------===// 0002 // 0003 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 0004 // See https://llvm.org/LICENSE.txt for license information. 0005 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 0006 // 0007 //===----------------------------------------------------------------------===// 0008 0009 #ifndef _LIBCPP___ATOMIC_SUPPORT_H 0010 #define _LIBCPP___ATOMIC_SUPPORT_H 0011 0012 #include <__config> 0013 #include <__type_traits/is_trivially_copyable.h> 0014 0015 #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER) 0016 # pragma GCC system_header 0017 #endif 0018 0019 // 0020 // This file implements base support for atomics on the platform. 0021 // 0022 // The following operations and types must be implemented (where _Atmc 0023 // is __cxx_atomic_base_impl for readability): 0024 // 0025 // clang-format off 0026 // 0027 // template <class _Tp> 0028 // struct __cxx_atomic_base_impl; 0029 // 0030 // #define __cxx_atomic_is_lock_free(__size) 0031 // 0032 // void __cxx_atomic_thread_fence(memory_order __order) noexcept; 0033 // void __cxx_atomic_signal_fence(memory_order __order) noexcept; 0034 // 0035 // template <class _Tp> 0036 // void __cxx_atomic_init(_Atmc<_Tp> volatile* __a, _Tp __val) noexcept; 0037 // template <class _Tp> 0038 // void __cxx_atomic_init(_Atmc<_Tp>* __a, _Tp __val) noexcept; 0039 // 0040 // template <class _Tp> 0041 // void __cxx_atomic_store(_Atmc<_Tp> volatile* __a, _Tp __val, memory_order __order) noexcept; 0042 // template <class _Tp> 0043 // void __cxx_atomic_store(_Atmc<_Tp>* __a, _Tp __val, memory_order __order) noexcept; 0044 // 0045 // template <class _Tp> 0046 // _Tp __cxx_atomic_load(_Atmc<_Tp> const volatile* __a, memory_order __order) noexcept; 0047 // template <class _Tp> 0048 // _Tp __cxx_atomic_load(_Atmc<_Tp> const* __a, memory_order __order) noexcept; 0049 // 0050 // template <class _Tp> 0051 // void 
__cxx_atomic_load_inplace(_Atmc<_Tp> const volatile* __a, _Tp* __dst, memory_order __order) noexcept; 0052 // template <class _Tp> 0053 // void __cxx_atomic_load_inplace(_Atmc<_Tp> const* __a, _Tp* __dst, memory_order __order) noexcept; 0054 // 0055 // template <class _Tp> 0056 // _Tp __cxx_atomic_exchange(_Atmc<_Tp> volatile* __a, _Tp __value, memory_order __order) noexcept; 0057 // template <class _Tp> 0058 // _Tp __cxx_atomic_exchange(_Atmc<_Tp>* __a, _Tp __value, memory_order __order) noexcept; 0059 // 0060 // template <class _Tp> 0061 // bool __cxx_atomic_compare_exchange_strong(_Atmc<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept; 0062 // template <class _Tp> 0063 // bool __cxx_atomic_compare_exchange_strong(_Atmc<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept; 0064 // 0065 // template <class _Tp> 0066 // bool __cxx_atomic_compare_exchange_weak(_Atmc<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept; 0067 // template <class _Tp> 0068 // bool __cxx_atomic_compare_exchange_weak(_Atmc<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept; 0069 // 0070 // template <class _Tp> 0071 // _Tp __cxx_atomic_fetch_add(_Atmc<_Tp> volatile* __a, _Tp __delta, memory_order __order) noexcept; 0072 // template <class _Tp> 0073 // _Tp __cxx_atomic_fetch_add(_Atmc<_Tp>* __a, _Tp __delta, memory_order __order) noexcept; 0074 // 0075 // template <class _Tp> 0076 // _Tp* __cxx_atomic_fetch_add(_Atmc<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) noexcept; 0077 // template <class _Tp> 0078 // _Tp* __cxx_atomic_fetch_add(_Atmc<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) noexcept; 0079 // 0080 // template <class _Tp> 0081 // _Tp __cxx_atomic_fetch_sub(_Atmc<_Tp> volatile* __a, _Tp __delta, memory_order __order) noexcept; 0082 // template <class _Tp> 
// _Tp __cxx_atomic_fetch_sub(_Atmc<_Tp>* __a, _Tp __delta, memory_order __order) noexcept;
// template <class _Tp>
// _Tp* __cxx_atomic_fetch_sub(_Atmc<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) noexcept;
// template <class _Tp>
// _Tp* __cxx_atomic_fetch_sub(_Atmc<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_fetch_and(_Atmc<_Tp> volatile* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_and(_Atmc<_Tp>* __a, _Tp __pattern, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_fetch_or(_Atmc<_Tp> volatile* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_or(_Atmc<_Tp>* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_xor(_Atmc<_Tp> volatile* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_xor(_Atmc<_Tp>* __a, _Tp __pattern, memory_order __order) noexcept;
//
// clang-format on
//

// Pull in the platform backend that implements the contract documented above:
// the GCC __atomic-builtins backend when available, otherwise the C11
// <stdatomic.h>-style backend. Each header defines __cxx_atomic_base_impl and
// the __cxx_atomic_* free functions. (If neither macro is set, no backend is
// included — presumably some other configuration provides one; the guard
// macros themselves are defined in <__config>, outside this file.)
#if _LIBCPP_HAS_GCC_ATOMIC_IMP
# include <__atomic/support/gcc.h>
#elif _LIBCPP_HAS_C_ATOMIC_IMP
# include <__atomic/support/c11.h>
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

// Thin wrapper over the backend's atomic storage type. It layers the
// library-level requirement that T be trivially copyable (diagnosed here via
// static_assert with the user-facing std::atomic<T> wording) on top of
// whichever _Base the selected backend supplies, without adding any state of
// its own.
template <typename _Tp, typename _Base = __cxx_atomic_base_impl<_Tp> >
struct __cxx_atomic_impl : public _Base {
  static_assert(is_trivially_copyable<_Tp>::value, "std::atomic<T> requires that 'T' be a trivially copyable type");

  // Default construction is delegated to _Base's default constructor.
  _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl() _NOEXCEPT = default;
  // Value-initializing constructor: forwards __value to the backend's storage.
  // constexpr so it can be used in constant initialization of atomics.
  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT : _Base(__value) {}
};

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_SUPPORT_H
| [ Source navigation ] | [ Diff markup ] | [ Identifier search ] | [ general search ] |
|
This page was automatically generated by the 2.3.7 LXR engine. The LXR team |
|