#ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_COMMON_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_COMMON_HPP_INCLUDED_

#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

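// Base class for the PPC core_arch_operations specializations. It defines the
// memory fences that the derived, size-specific operations emit around their
// lwarx/stwcx. (load-reserve/store-conditional) instruction sequences.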
struct core_arch_operations_gcc_ppc_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

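    // Emits the fence required before an atomic operation with the given
    // memory_order. On 64-bit PPC a full "sync" is only needed for
    // memory_order_seq_cst; the cheaper "lwsync" (lightweight sync) suffices
    // for any other order with the release bit set (release, acq_rel). On
    // 32-bit targets, where lwsync may be unavailable, a full "sync" is used
    // for every order with the release bit set.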
    static BOOST_FORCEINLINE void fence_before(memory_order order) BOOST_NOEXCEPT
    {
#if defined(__powerpc64__) || defined(__PPC64__)
        if (order == memory_order_seq_cst)
            __asm__ __volatile__ ("sync" ::: "memory");
        else if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
            __asm__ __volatile__ ("lwsync" ::: "memory");
#else
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
            __asm__ __volatile__ ("sync" ::: "memory");
#endif
    }

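    // Emits the fence required after an atomic operation. Any order with the
    // consume or acquire bit set (consume, acquire, acq_rel, seq_cst) gets an
    // "isync", which prevents subsequent instructions from starting before the
    // preceding atomic sequence completes.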
    static BOOST_FORCEINLINE void fence_after(memory_order order) BOOST_NOEXCEPT
    {
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
            __asm__ __volatile__ ("isync" ::: "memory");
    }
};
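
// A minimal sketch of how a derived operations class typically uses this base:
// the fences bracket an LL/SC (lwarx/stwcx.) retry loop, here for a 32-bit
// exchange. Illustrative only; the actual specializations live in the
// size-specific operation headers.
//
//     fence_before(order);
//     __asm__ __volatile__
//     (
//         "1:\n\t"
//         "lwarx %0,%y1\n\t"   // load word and set a reservation
//         "stwcx. %2,%y1\n\t"  // store conditionally on that reservation
//         "bne- 1b\n\t"        // retry if the reservation was lost
//         : "=&b" (original), "+Z" (storage)
//         : "b" (v)
//         : "cr0"
//     );
//     fence_after(order);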

} // namespace detail
} // namespace atomics
} // namespace boost

#include <boost/atomic/detail/footer.hpp>

#endif // BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_COMMON_HPP_INCLUDED_