/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/core_arch_ops_msvc_arm.hpp
 *
 * This header contains the implementation of the \c core_arch_operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_ARM_HPP_INCLUDED_

#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/core_arch_operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/detail/ops_msvc_common.hpp>
#include <boost/atomic/detail/fence_arch_operations.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

// Declarations of the MSVC __iso_volatile_* intrinsics, which perform plain
// volatile loads and stores of the given width without any implied memory ordering
extern "C" {
__int8 __iso_volatile_load8(const volatile __int8*);
__int16 __iso_volatile_load16(const volatile __int16*);
__int32 __iso_volatile_load32(const volatile __int32*);
__int64 __iso_volatile_load64(const volatile __int64*);
void __iso_volatile_store8(volatile __int8*, __int8);
void __iso_volatile_store16(volatile __int16*, __int16);
void __iso_volatile_store32(volatile __int32*, __int32);
void __iso_volatile_store64(volatile __int64*, __int64);
}
#if defined(BOOST_MSVC)
#pragma intrinsic(__iso_volatile_load8)
#pragma intrinsic(__iso_volatile_load16)
#pragma intrinsic(__iso_volatile_load32)
#pragma intrinsic(__iso_volatile_load64)
#pragma intrinsic(__iso_volatile_store8)
#pragma intrinsic(__iso_volatile_store16)
#pragma intrinsic(__iso_volatile_store32)
#pragma intrinsic(__iso_volatile_store64)
#endif

#define BOOST_ATOMIC_DETAIL_ARM_LOAD8(p) __iso_volatile_load8((const volatile __int8*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD16(p) __iso_volatile_load16((const volatile __int16*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD32(p) __iso_volatile_load32((const volatile __int32*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_LOAD64(p) __iso_volatile_load64((const volatile __int64*)(p))
#define BOOST_ATOMIC_DETAIL_ARM_STORE8(p, v) __iso_volatile_store8((volatile __int8*)(p), (__int8)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE16(p, v) __iso_volatile_store16((volatile __int16*)(p), (__int16)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE32(p, v) __iso_volatile_store32((volatile __int32*)(p), (__int32)(v))
#define BOOST_ATOMIC_DETAIL_ARM_STORE64(p, v) __iso_volatile_store64((volatile __int64*)(p), (__int64)(v))

namespace boost {
namespace atomics {
namespace detail {

// A note about memory_order_consume. Technically, this architecture allows us to avoid
// an unnecessary memory barrier after a consume load, since it supports data dependency ordering.
// However, some compiler optimizations may break seemingly valid code that relies on data
// dependency tracking by injecting bogus branches to aid out of order execution.
// This may happen not only in Boost.Atomic code but also in user code, over which we have no
// control. See this thread: http://lists.boost.org/Archives/boost/2014/06/213890.php.
// For this reason we promote memory_order_consume to memory_order_acquire.
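//
// For illustration, a typical dependency-carrying pattern that this promotion
// protects (a sketch with hypothetical names; node, m_head and data are not
// part of this header):
//
//   node* p = m_head.load(boost::memory_order_consume);
//   if (p)
//       int value = p->data; // the read of p->data is data-dependent on p
//
// The hardware orders the dependent read after the load, but a compiler may
// rewrite the dependency into a branch on the value of p, destroying the
// ordering guarantee; treating the load as an acquire load is always safe.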
struct core_arch_operations_msvc_arm_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    // Emits a hardware fence before the store if the order includes the release flag
    // (i.e. for release, acq_rel and seq_cst stores)
    static BOOST_FORCEINLINE void fence_before_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
            fence_arch_operations::hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    // Emits a trailing hardware fence only for seq_cst stores
    static BOOST_FORCEINLINE void fence_after_store(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if (order == memory_order_seq_cst)
            fence_arch_operations::hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    // Emits a hardware fence after the load if the order includes the consume or acquire flag
    // (i.e. for consume, acquire, acq_rel and seq_cst loads)
    static BOOST_FORCEINLINE void fence_after_load(memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
            fence_arch_operations::hardware_full_fence();

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE BOOST_CONSTEXPR memory_order cas_common_order(memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        // Combine order flags together and promote memory_order_consume to memory_order_acquire
        return static_cast< memory_order >(((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & ~static_cast< unsigned int >(memory_order_consume))
            | (((static_cast< unsigned int >(failure_order) | static_cast< unsigned int >(success_order)) & static_cast< unsigned int >(memory_order_consume)) << 1u));
    }
};
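
// A worked example of cas_common_order (a sketch; it assumes Boost's
// bitmask-style memory_order encoding, which the bit operations above rely on:
// relaxed = 0, consume = 1, acquire = 2, release = 4, acq_rel = 6, seq_cst = 14):
//
//   cas_common_order(memory_order_release, memory_order_consume)
//     orders combined:                  4 | 1 == 5
//     consume bit cleared:              5 & ~1 == 4
//     consume bit promoted to acquire:  (5 & 1) << 1 == 2
//     result:                           4 | 2 == 6 == memory_order_acq_rel
//
// That is, a release success order paired with a consume failure order selects
// the acq_rel flavor of the interlocked compare-exchange in the specializations below.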

template< std::size_t Size, bool Signed, bool Interprocess, typename Derived >
struct core_arch_operations_msvc_arm :
    public core_arch_operations_msvc_arm_base
{
    typedef typename storage_traits< Size >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = Size;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = storage_traits< Size >::alignment;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    // Subtraction is implemented as addition of the negated operand; the cast through
    // the signed type avoids applying unary minus to an unsigned value, which some
    // compilers warn about
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

    // The weak CAS is expressed through the strong CAS provided by the derived class
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        Derived::store(storage, (storage_type)0, order);
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 1u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    // Plain stores and loads use the non-fenced __iso_volatile intrinsics, bracketed
    // by the fences implemented in the base class as required by the memory order
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE8(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD8(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
            break;
        }
        return v;
    }
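
    // Note: the order-suffixed interlocked macros used above (_RELAXED/_ACQUIRE/_RELEASE)
    // presumably expand to the MSVC ARM intrinsic variants with the _nf, _acq and _rel
    // suffixes (e.g. _InterlockedExchangeAdd8_nf); the exact mapping is defined in
    // boost/atomic/detail/interlocked.hpp, included above. The unsuffixed macro performs
    // a fully fenced operation and therefore also serves acq_rel and seq_cst. The same
    // dispatch pattern repeats in every operation below.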

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (base_type::cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 2u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE16(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD16(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (base_type::cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
            break;
        }
        return v;
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 4u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 4u, Signed, Interprocess, core_arch_operations< 4u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 4u, Signed, Interprocess, core_arch_operations< 4u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE32(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD32(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (base_type::cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
            break;
        }
        return v;
    }
};
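
// Usage sketch (illustrative only; the values and template arguments below are
// hypothetical). The 4-byte specialization above could be exercised directly as
// follows, although in practice these operations back boost::atomic<T> and are
// not meant to be called by user code:
//
//   typedef boost::atomics::detail::core_arch_operations< 4u, false, false > ops;
//   ops::storage_type s = 0u;
//   ops::store(s, 1u, boost::memory_order_release);
//   ops::storage_type old = ops::fetch_add(s, 5u, boost::memory_order_acq_rel); // old == 1, s == 6
//   ops::storage_type expected = 6u;
//   bool ok = ops::compare_exchange_strong(
//       s, expected, 0u, boost::memory_order_seq_cst, boost::memory_order_relaxed); // ok == true, s == 0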

template< bool Signed, bool Interprocess >
struct core_arch_operations< 8u, Signed, Interprocess > :
    public core_arch_operations_msvc_arm< 8u, Signed, Interprocess, core_arch_operations< 8u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_arm< 8u, Signed, Interprocess, core_arch_operations< 8u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before_store(order);
        BOOST_ATOMIC_DETAIL_ARM_STORE64(&storage, v);
        base_type::fence_after_store(order);
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = BOOST_ATOMIC_DETAIL_ARM_LOAD64(&storage);
        base_type::fence_after_load(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected, old_val;

        switch (base_type::cas_common_order(success_order, failure_order))
        {
        case memory_order_relaxed:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELAXED(&storage, desired, previous));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_ACQUIRE(&storage, desired, previous));
            break;
        case memory_order_release:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64_RELEASE(&storage, desired, previous));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
            break;
        }
        expected = old_val;

        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
            break;
        }
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        switch (order)
        {
        case memory_order_relaxed:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELAXED(&storage, v));
            break;
        case memory_order_consume:
        case memory_order_acquire:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(&storage, v));
            break;
        case memory_order_release:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(&storage, v));
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            v = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
            break;
        }
        return v;
    }
};

} // namespace detail
} // namespace atomics
} // namespace boost

#undef BOOST_ATOMIC_DETAIL_ARM_LOAD8
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD16
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD32
#undef BOOST_ATOMIC_DETAIL_ARM_LOAD64
#undef BOOST_ATOMIC_DETAIL_ARM_STORE8
#undef BOOST_ATOMIC_DETAIL_ARM_STORE16
#undef BOOST_ATOMIC_DETAIL_ARM_STORE32
#undef BOOST_ATOMIC_DETAIL_ARM_STORE64

#include <boost/atomic/detail/footer.hpp>

#endif // BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_ARM_HPP_INCLUDED_