/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2020 Andrey Semashev
 */
/*!
 * \file   atomic/detail/core_arch_ops_gcc_aarch64.hpp
 *
 * This header contains an implementation of the \c core_arch_operations template.
 */
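/*
 * Illustrative usage sketch (not part of this header): higher layers of
 * Boost.Atomic select a specialization by storage size, roughly along the
 * lines of
 *
 *   typedef atomics::detail::core_arch_operations< 4u, false, false > ops;
 *   ops::storage_type s = 0u;
 *   ops::store(s, 1u, memory_order_release);
 *   ops::storage_type old = ops::fetch_add(s, 5u, memory_order_acq_rel); // old == 1
 *
 * The selection logic lives elsewhere in Boost.Atomic; the snippet above is
 * an assumption for illustration only.
 */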

#ifndef BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_AARCH64_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_AARCH64_HPP_INCLUDED_

#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/core_arch_operations_fwd.hpp>
#include <boost/atomic/detail/capabilities.hpp>
#include <boost/atomic/detail/ops_gcc_aarch64_common.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

struct core_arch_operations_gcc_aarch64_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;
};

// Due to bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63359 we have to explicitly specify the size of the registers
// used in the asm blocks below. Use the %w prefix for 32-bit registers and %x for 64-bit ones.
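// For example, if the operand bound to [value] is allocated to register 0, "%w[value]"
// expands to "w0" (the 32-bit view) while "%x[value]" expands to "x0" (the full 64-bit view).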

// A note about the compare_exchange implementations. Since failure_order must never include release semantics and
// must not be stronger than success_order, we can always use success_order to select instructions. Thus, when the
// CAS fails, only the acquire semantics of success_order are applied, which may be stronger than failure_order requires.
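// For example, a compare_exchange with success_order == memory_order_acq_rel and
// failure_order == memory_order_acquire selects the acquire+release instruction forms;
// on failure only the acquire half takes effect, which satisfies (and may exceed) the
// requested failure_order.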

template< bool Signed, bool Interprocess >
struct core_arch_operations< 1u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 1u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 1u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
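        // Any order containing the release bit (release, acq_rel, seq_cst) takes the
        // store-release path; on AArch64, stlrb paired with ldarb on the load side is
        // sufficient for sequentially consistent stores as well.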
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
        {
            __asm__ __volatile__
            (
                "stlrb %w[value], %[storage]\n\t"
                : [storage] "=Q" (storage)
                : [value] "r" (v)
                : "memory"
            );
        }
        else
        {
            storage = v;
        }
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
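            // LDAPRB (RCpc load-acquire) is sufficient for consume/acquire loads but not
            // for seq_cst loads, which still require the stronger LDARB below.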
            if (order == memory_order_consume || order == memory_order_acquire)
            {
                __asm__ __volatile__
                (
                    "ldaprb %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
            else
#endif
            {
                __asm__ __volatile__
                (
                    "ldarb %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
        }
        else
        {
            v = storage;
        }

        return v;
    }

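    // Each operation below defines BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN, dispatches on the
    // memory order via BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH (from ops_gcc_aarch64_common.hpp),
    // and then undefines the macro. The switch invokes the macro with the "a" (acquire) and
    // "l" (release) instruction suffix fragments matching the requested order.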
    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "swp" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
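        // Without LSE, emulate the exchange with an exclusive load/store (LL/SC) retry loop:
        // ldxrb loads the byte and arms the exclusive monitor, stxrb succeeds (writing 0 to
        // tmp) only if the monitor is still armed, and cbnz retries on failure.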
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[value], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "b %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

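        // uxtb zero-extends expected so that the 32-bit cmp below matches the
        // zero-extended byte produced by ldxrb regardless of signedness.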
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxtb %w[expected], %w[expected]\n\t"\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xrb %w[success], %w[desired], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

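    // compare_exchange_strong differs from the weak form only in the non-LSE path: a
    // spurious store-exclusive failure branches back to 1: and retries, so only a
    // genuine value mismatch reports failure.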
    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "b %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxtb %w[expected], %w[expected]\n\t"\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xrb %w[success], %w[desired], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "add %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
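        // There is no LSE subtract instruction; negate the addend and use ldadd, which
        // yields the same result modulo the storage width.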
        v = -v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );

#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "sub %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
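        // ldclr atomically computes storage & ~value (bit clear), so complementing v
        // first yields the desired storage & v.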
        v = ~v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldclr" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

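        // Constraint letters: "I" (used in fetch_add/fetch_sub) admits add/sub immediates,
        // while "K" admits 32-bit logical immediates ("L" is the 64-bit equivalent used in
        // the 8-byte specialization).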
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "and %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
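        // ldset performs an atomic bitwise OR (sets the bits given in the operand).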
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldset" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "orr %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
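        // ldeor performs an atomic bitwise exclusive OR.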
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldeor" ld_mo st_mo "b %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "eor %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};

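// The 2-, 4-, and 8-byte specializations below mirror the 1-byte implementation above,
// substituting the halfword ("h" suffix), word (no suffix, %w registers), and doubleword
// (no suffix, %x registers) instruction forms.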
template< bool Signed, bool Interprocess >
struct core_arch_operations< 2u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 2u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 2u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
        {
            __asm__ __volatile__
            (
                "stlrh %w[value], %[storage]\n\t"
                : [storage] "=Q" (storage)
                : [value] "r" (v)
                : "memory"
            );
        }
        else
        {
            storage = v;
        }
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
            if (order == memory_order_consume || order == memory_order_acquire)
            {
                __asm__ __volatile__
                (
                    "ldaprh %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
            else
#endif
            {
                __asm__ __volatile__
                (
                    "ldarh %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
        }
        else
        {
            v = storage;
        }

        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "swp" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[value], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "h %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxth %w[expected], %w[expected]\n\t"\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xrh %w[success], %w[desired], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo "h %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "uxth %w[expected], %w[expected]\n\t"\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xrh %w[success], %w[desired], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "r" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "add %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = -v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );

#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "sub %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = ~v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldclr" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "and %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldset" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "orr %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldeor" ld_mo st_mo "h %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "eor %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 4u, Signed, Interprocess > :
    public core_arch_operations_gcc_aarch64_base
{
    typedef typename storage_traits< 4u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 4u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
        {
            __asm__ __volatile__
            (
                "stlr %w[value], %[storage]\n\t"
                : [storage] "=Q" (storage)
                : [value] "r" (v)
                : "memory"
            );
        }
        else
        {
            storage = v;
        }
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v;
        if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
        {
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
            if (order == memory_order_consume || order == memory_order_acquire)
            {
                __asm__ __volatile__
                (
                    "ldapr %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
            else
#endif
            {
                __asm__ __volatile__
                (
                    "ldar %w[value], %[storage]\n\t"
                    : [value] "=r" (v)
                    : [storage] "Q" (storage)
                    : "memory"
                );
            }
        }
        else
        {
            v = storage;
        }

        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "swp" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "st" st_mo "xr %w[tmp], %w[value], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo " %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "mov %w[success], #0\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 1f\n\t"\
            "st" st_mo "xr %w[success], %w[desired], %[storage]\n\t"\
            "eor %w[success], %w[success], #1\n\t"\
            "1:\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "Ir" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        original = expected;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "cas" ld_mo st_mo " %w[original], %w[desired], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "+r" (original)\
            : [desired] "r" (desired)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
        bool success = original == expected;
#else
        bool success;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "cmp %w[original], %w[expected]\n\t"\
            "b.ne 2f\n\t"\
            "st" st_mo "xr %w[success], %w[desired], %[storage]\n\t"\
            "cbnz %w[success], 1b\n\t"\
            "2:\n\t"\
            "cset %w[success], eq\n\t"\
            : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [desired] "r" (desired), [expected] "Ir" (expected)\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
#endif
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        expected = original;
        return success;
    }

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "add %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = -v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldadd" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );

#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "sub %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Ir" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
        v = ~v;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldclr" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "and %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldset" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "orr %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original;
#if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "ldeor" ld_mo st_mo " %w[value], %w[original], %[storage]\n\t"\
            : [storage] "+Q" (storage), [original] "=r" (original)\
            : [value] "r" (v)\
            : "memory"\
        );
#else
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "eor %w[result], %w[original], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : [value] "Kr" (v)\
            : "memory"\
        );
#endif

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};

1204 template< bool Signed, bool Interprocess >
1205 struct core_arch_operations< 8u, Signed, Interprocess > :
1206     public core_arch_operations_gcc_aarch64_base
1207 {
1208     typedef typename storage_traits< 8u >::type storage_type;
1209 
1210     static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
1211     static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 8u;
1212     static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
1213     static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;
1214 
1215     static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1216     {
1217         if ((static_cast< unsigned int >(order) & static_cast< unsigned int >(memory_order_release)) != 0u)
1218         {
1219             __asm__ __volatile__
1220             (
1221                 "stlr %x[value], %[storage]\n\t"
1222                 : [storage] "=Q" (storage)
1223                 : [value] "r" (v)
1224                 : "memory"
1225             );
1226         }
1227         else
1228         {
1229             storage = v;
1230         }
1231     }
1232 
1233     static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
1234     {
1235         storage_type v;
1236         if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
1237         {
1238 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_RCPC)
1239             if (order == memory_order_consume || order == memory_order_acquire)
1240             {
1241                 __asm__ __volatile__
1242                 (
1243                     "ldapr %x[value], %[storage]\n\t"
1244                     : [value] "=r" (v)
1245                     : [storage] "Q" (storage)
1246                     : "memory"
1247                 );
1248             }
1249             else
1250 #endif
1251             {
1252                 __asm__ __volatile__
1253                 (
1254                     "ldar %x[value], %[storage]\n\t"
1255                     : [value] "=r" (v)
1256                     : [storage] "Q" (storage)
1257                     : "memory"
1258                 );
1259             }
1260         }
1261         else
1262         {
1263             v = storage;
1264         }
1265 
1266         return v;
1267     }
1268 
1269     static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1270     {
1271         storage_type original;
1272 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
1273 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1274         __asm__ __volatile__\
1275         (\
1276             "swp" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
1277             : [storage] "+Q" (storage), [original] "=r" (original)\
1278             : [value] "r" (v)\
1279             : "memory"\
1280         );
1281 #else
1282         uint32_t tmp;
1283 
1284 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1285         __asm__ __volatile__\
1286         (\
1287             "1:\n\t"\
1288             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1289             "st" st_mo "xr %w[tmp], %x[value], %[storage]\n\t"\
1290             "cbnz %w[tmp], 1b\n\t"\
1291             : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original] "=&r" (original)\
1292             : [value] "r" (v)\
1293             : "memory"\
1294         );
1295 #endif
1296 
1297         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1298 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1299 
1300         return original;
1301     }
1302 
1303     static BOOST_FORCEINLINE bool compare_exchange_weak(
1304         storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
1305     {
1306         storage_type original;
1307 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
1308         original = expected;
1309 
1310 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1311         __asm__ __volatile__\
1312         (\
1313             "cas" ld_mo st_mo " %x[original], %x[desired], %[storage]\n\t"\
1314             : [storage] "+Q" (storage), [original] "+r" (original)\
1315             : [desired] "r" (desired)\
1316             : "memory"\
1317         );
1318 
1319         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
1320         bool success = original == expected;
1321 #else
1322         bool success;
1323 
1324 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1325         __asm__ __volatile__\
1326         (\
1327             "mov %w[success], #0\n\t"\
1328             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1329             "cmp %x[original], %x[expected]\n\t"\
1330             "b.ne 1f\n\t"\
1331             "st" st_mo "xr %w[success], %x[desired], %[storage]\n\t"\
1332             "eor %w[success], %w[success], #1\n\t"\
1333             "1:\n\t"\
1334             : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
1335             : [desired] "r" (desired), [expected] "Ir" (expected)\
1336             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1337         );
1338 
1339         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
1340 #endif
1341 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1342 
1343         expected = original;
1344         return success;
1345     }
1346 
1347     static BOOST_FORCEINLINE bool compare_exchange_strong(
1348         storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
1349     {
1350         storage_type original;
1351 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
1352         original = expected;
1353 
1354 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1355         __asm__ __volatile__\
1356         (\
1357             "cas" ld_mo st_mo " %x[original], %x[desired], %[storage]\n\t"\
1358             : [storage] "+Q" (storage), [original] "+r" (original)\
1359             : [desired] "r" (desired)\
1360             : "memory"\
1361         );
1362 
1363         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
1364         bool success = original == expected;
1365 #else
1366         bool success;
1367 
1368 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1369         __asm__ __volatile__\
1370         (\
1371             "1:\n\t"\
1372             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1373             "cmp %x[original], %x[expected]\n\t"\
1374             "b.ne 2f\n\t"\
1375             "st" st_mo "xr %w[success], %x[desired], %[storage]\n\t"\
1376             "cbnz %w[success], 1b\n\t"\
1377             "2:\n\t"\
1378             "cset %w[success], eq\n\t"\
1379             : [success] "=&r" (success), [storage] "+Q" (storage), [original] "=&r" (original)\
1380             : [desired] "r" (desired), [expected] "Ir" (expected)\
1381             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1382         );
1383 
1384         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
1385 #endif
1386 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1387 
1388         expected = original;
1389         return success;
1390     }
1391 
1392     static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1393     {
1394         storage_type original;
1395 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
1396 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1397         __asm__ __volatile__\
1398         (\
1399             "ldadd" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
1400             : [storage] "+Q" (storage), [original] "=r" (original)\
1401             : [value] "r" (v)\
1402             : "memory"\
1403         );
1404 #else
1405         storage_type result;
1406         uint32_t tmp;
1407 
1408 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1409         __asm__ __volatile__\
1410         (\
1411             "1:\n\t"\
1412             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1413             "add %x[result], %x[original], %x[value]\n\t"\
1414             "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
1415             "cbnz %w[tmp], 1b\n\t"\
1416             : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
1417             : [value] "Ir" (v)\
1418             : "memory"\
1419         );
1420 #endif
1421 
1422         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1423 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1424 
1425         return original;
1426     }
1427 
1428     static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1429     {
1430         storage_type original;
1431 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
1432         v = -v;
1433 
1434 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1435         __asm__ __volatile__\
1436         (\
1437             "ldadd" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
1438             : [storage] "+Q" (storage), [original] "=r" (original)\
1439             : [value] "r" (v)\
1440             : "memory"\
1441         );
1442 
1443 #else
1444         storage_type result;
1445         uint32_t tmp;
1446 
1447 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1448         __asm__ __volatile__\
1449         (\
1450             "1:\n\t"\
1451             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1452             "sub %x[result], %x[original], %x[value]\n\t"\
1453             "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
1454             "cbnz %w[tmp], 1b\n\t"\
1455             : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
1456             : [value] "Ir" (v)\
1457             : "memory"\
1458         );
1459 #endif
1460 
1461         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1462 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1463 
1464         return original;
1465     }
1466 
1467     static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1468     {
1469         storage_type original;
1470 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
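        // LSE ldclr atomically clears the bits set in the operand (computes storage & ~operand),
        // so the operand is complemented first to obtain storage & v.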
1471         v = ~v;
1472 
1473 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1474         __asm__ __volatile__\
1475         (\
1476             "ldclr" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
1477             : [storage] "+Q" (storage), [original] "=r" (original)\
1478             : [value] "r" (v)\
1479             : "memory"\
1480         );
1481 #else
1482         storage_type result;
1483         uint32_t tmp;
1484 
1485 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1486         __asm__ __volatile__\
1487         (\
1488             "1:\n\t"\
1489             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1490             "and %x[result], %x[original], %x[value]\n\t"\
1491             "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
1492             "cbnz %w[tmp], 1b\n\t"\
1493             : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
1494             : [value] "Lr" (v)\
1495             : "memory"\
1496         );
1497 #endif
1498 
1499         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1500 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1501 
1502         return original;
1503     }
1504 
1505     static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1506     {
1507         storage_type original;
1508 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
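        // LSE ldset atomically ORs the operand into the storage.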
1509 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1510         __asm__ __volatile__\
1511         (\
1512             "ldset" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
1513             : [storage] "+Q" (storage), [original] "=r" (original)\
1514             : [value] "r" (v)\
1515             : "memory"\
1516         );
1517 #else
1518         storage_type result;
1519         uint32_t tmp;
1520 
1521 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1522         __asm__ __volatile__\
1523         (\
1524             "1:\n\t"\
1525             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1526             "orr %x[result], %x[original], %x[value]\n\t"\
1527             "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
1528             "cbnz %w[tmp], 1b\n\t"\
1529             : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
1530             : [value] "Lr" (v)\
1531             : "memory"\
1532         );
1533 #endif
1534 
1535         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1536 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1537 
1538         return original;
1539     }
1540 
1541     static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1542     {
1543         storage_type original;
1544 #if defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
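        // LSE ldeor atomically XORs the operand into the storage.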
1545 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1546         __asm__ __volatile__\
1547         (\
1548             "ldeor" ld_mo st_mo " %x[value], %x[original], %[storage]\n\t"\
1549             : [storage] "+Q" (storage), [original] "=r" (original)\
1550             : [value] "r" (v)\
1551             : "memory"\
1552         );
1553 #else
1554         storage_type result;
1555         uint32_t tmp;
1556 
1557 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1558         __asm__ __volatile__\
1559         (\
1560             "1:\n\t"\
1561             "ld" ld_mo "xr %x[original], %[storage]\n\t"\
1562             "eor %x[result], %x[original], %x[value]\n\t"\
1563             "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
1564             "cbnz %w[tmp], 1b\n\t"\
1565             : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
1566             : [value] "Lr" (v)\
1567             : "memory"\
1568         );
1569 #endif
1570 
1571         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1572 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1573 
1574         return original;
1575     }
1576 
1577     static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
1578     {
1579         return !!exchange(storage, (storage_type)1, order);
1580     }
1581 
1582     static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
1583     {
1584         store(storage, (storage_type)0, order);
1585     }
1586 };
1587 
1588 // For 128-bit atomic operations we always have to use ldxp+stxp (optionally, with acquire/release semantics), even in load and store operations.
1589 // ARM Architecture Reference Manual Armv8, for Armv8-A architecture profile, Section B2.2.1 "Requirements for single-copy atomicity"
1590 // specifies that ldxp does not guarantee an atomic load, so we have to perform an ldxp+stxp loop to ensure that the loaded value
1591 // is consistent with a previous atomic store.
1592 //
1593 // The ldxp and stxp instructions operate on pairs of registers, meaning that each load loads two integers from memory in
1594 // successive address order, to the first and second registers in the pair, respectively, and each store similarly stores two integers.
1595 // The order of these integers does not depend on the active endianness mode (although the byte order in the integers themselves
1596 // obviously does depend on endianness). This means we need to account for the current endianness mode ourselves, where it matters.
1597 //
1598 // Unlike AArch32/A32 or ARMv7, ldxp/stxp do not require adjacent even+odd registers in the pair and accept any two different
1599 // registers. Still, it may be preferable to select adjacent registers, as 128-bit objects are represented by two adjacent
1600 // registers in the ABI. Unfortunately, clang 10 (and probably older versions) does not seem to support allocating register pairs
1601 // in asm blocks the way it does for ARMv7. For now we use a union to convert between a pair of 64-bit elements and the 128-bit storage.
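//
// For illustration only (a sketch with hypothetical values, not part of the implementation):
// the conversion performed by the operations below is
//
//   storage_union value = { v };  // value.as_uint64[0] aliases the half at the lower address
//   // little-endian: as_uint64[0] is the least significant half, as_uint64[1] the most significant
//   // big-endian:    the mapping of the halves is reversed
//
// which is why fetch_add/fetch_sub select the carry chain order through the
// BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO/HI selectors instead of hardcoding indices 0 and 1.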
1602 
1603 template< bool Signed, bool Interprocess >
1604 struct core_arch_operations< 16u, Signed, Interprocess > :
1605     public core_arch_operations_gcc_aarch64_base
1606 {
1607     typedef typename storage_traits< 16u >::type storage_type;
1608 
1609     static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 16u;
1610     static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 16u;
1611     static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
1612     static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;
1613 
1614     // Union to convert between a pair of 64-bit registers and the 128-bit storage type
1615     union storage_union
1616     {
1617         storage_type as_storage;
1618         uint64_t as_uint64[2u];
1619     };
1620 
1621     static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1622     {
1623         exchange(storage, v, order);
1624     }
1625 
1626     static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
1627     {
1628         storage_union v;
1629         uint32_t tmp;
1630         if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
1631         {
1632             __asm__ __volatile__
1633             (
1634                 "1:\n\t"
1635                 "ldaxp %x[value_0], %x[value_1], %[storage]\n\t"
1636                 "stxp %w[tmp], %x[value_0], %x[value_1], %[storage]\n\t"
1637                 "cbnz %w[tmp], 1b\n\t"
1638                 : [tmp] "=&r" (tmp), [value_0] "=&r" (v.as_uint64[0u]), [value_1] "=&r" (v.as_uint64[1u])
1639                 : [storage] "Q" (storage)
1640                 : "memory"
1641             );
1642         }
1643         else
1644         {
1645             __asm__ __volatile__
1646             (
1647                 "1:\n\t"
1648                 "ldxp %x[value_0], %x[value_1], %[storage]\n\t"
1649                 "stxp %w[tmp], %x[value_0], %x[value_1], %[storage]\n\t"
1650                 "cbnz %w[tmp], 1b\n\t"
1651                 : [tmp] "=&r" (tmp), [value_0] "=&r" (v.as_uint64[0u]), [value_1] "=&r" (v.as_uint64[1u])
1652                 : [storage] "Q" (storage)
1653                 : "memory"
1654             );
1655         }
1656 
1657         return v.as_storage;
1658     }
1659 
1660     static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1661     {
1662         storage_union original;
1663         storage_union value = { v };
1664         uint32_t tmp;
1665 
1666 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1667         __asm__ __volatile__\
1668         (\
1669             "1:\n\t"\
1670             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1671             "st" st_mo "xp %w[tmp], %x[value_0], %x[value_1], %[storage]\n\t"\
1672             "cbnz %w[tmp], 1b\n\t"\
1673             : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u])\
1674             : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
1675             : "memory"\
1676         );
1677 
1678         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1679 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1680 
1681         return original.as_storage;
1682     }
1683 
1684     static BOOST_FORCEINLINE bool compare_exchange_weak(
1685         storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
1686     {
1687         storage_union original;
1688         storage_union e = { expected };
1689         storage_union d = { desired };
1690         bool success;
1691 
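        // Single LL/SC attempt (a weak CAS may fail spuriously): success starts at 0, which
        // also covers the early exit on comparison mismatch; stxp writes 0 on success, so
        // eor ... #1 turns the store status into the boolean result.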
1692 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1693         __asm__ __volatile__\
1694         (\
1695             "mov %w[success], #0\n\t"\
1696             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1697             "cmp %x[original_0], %x[expected_0]\n\t"\
1698             "ccmp %x[original_1], %x[expected_1], #0, eq\n\t"\
1699             "b.ne 1f\n\t"\
1700             "st" st_mo "xp %w[success], %x[desired_0], %x[desired_1], %[storage]\n\t"\
1701             "eor %w[success], %w[success], #1\n\t"\
1702             "1:\n\t"\
1703             : [success] "=&r" (success), [storage] "+Q" (storage), [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u])\
1704             : [desired_0] "r" (d.as_uint64[0u]), [desired_1] "r" (d.as_uint64[1u]), [expected_0] "r" (e.as_uint64[0u]), [expected_1] "r" (e.as_uint64[1u])\
1705             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1706         );
1707 
1708         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
1709 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1710 
1711         expected = original.as_storage;
1712         return success;
1713     }
1714 
1715     static BOOST_FORCEINLINE bool compare_exchange_strong(
1716         storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
1717     {
1718         storage_union original;
1719         storage_union e = { expected };
1720         storage_union d = { desired };
1721         bool success;
1722 
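        // Strong CAS: the loop retries while the exclusive store fails; stxp does not modify
        // the flags, so at the exit label cset still sees the eq flag from the comparison.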
1723 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1724         __asm__ __volatile__\
1725         (\
1726             "1:\n\t"\
1727             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1728             "cmp %x[original_0], %x[expected_0]\n\t"\
1729             "ccmp %x[original_1], %x[expected_1], #0, eq\n\t"\
1730             "b.ne 2f\n\t"\
1731             "st" st_mo "xp %w[success], %x[desired_0], %x[desired_1], %[storage]\n\t"\
1732             "cbnz %w[success], 1b\n\t"\
1733             "2:\n\t"\
1734             "cset %w[success], eq\n\t"\
1735             : [success] "=&r" (success), [storage] "+Q" (storage), [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u])\
1736             : [desired_0] "r" (d.as_uint64[0u]), [desired_1] "r" (d.as_uint64[1u]), [expected_0] "r" (e.as_uint64[0u]), [expected_1] "r" (e.as_uint64[1u])\
1737             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1738         );
1739 
1740         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(success_order)
1741 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1742 
1743         expected = original.as_storage;
1744         return success;
1745     }
1746 
1747     static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1748     {
1749         storage_union original;
1750         storage_union value = { v };
1751         storage_union result;
1752         uint32_t tmp;
1753 
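        // 128-bit addition as an adds/adc carry chain; the _LO/_HI argument selectors order
        // the halves according to the target endianness (see the note above this class template).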
1754 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1755         __asm__ __volatile__\
1756         (\
1757             "1:\n\t"\
1758             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1759             "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
1760             "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
1761             "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
1762             "cbnz %w[tmp], 1b\n\t"\
1763             : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
1764               [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
1765               [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
1766             : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
1767             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1768         );
1769 
1770         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1771 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1772 
1773         return original.as_storage;
1774     }
1775 
1776     static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1777     {
1778         storage_union original;
1779         storage_union value = { v };
1780         storage_union result;
1781         uint32_t tmp;
1782 
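        // 128-bit subtraction as a subs/sbc borrow chain, with the same endianness-aware
        // ordering of the halves as in fetch_add.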
1783 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1784         __asm__ __volatile__\
1785         (\
1786             "1:\n\t"\
1787             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1788             "subs %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
1789             "sbc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[original_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
1790             "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
1791             "cbnz %w[tmp], 1b\n\t"\
1792             : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
1793               [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
1794               [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
1795             : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
1796             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1797         );
1798 
1799         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1800 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1801 
1802         return original.as_storage;
1803     }
1804 
1805     static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1806     {
1807         storage_union original;
1808         storage_union value = { v };
1809         storage_union result;
1810         uint32_t tmp;
1811 
1812 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1813         __asm__ __volatile__\
1814         (\
1815             "1:\n\t"\
1816             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1817             "and %x[result_0], %x[original_0], %x[value_0]\n\t"\
1818             "and %x[result_1], %x[original_1], %x[value_1]\n\t"\
1819             "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
1820             "cbnz %w[tmp], 1b\n\t"\
1821             : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
1822               [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
1823               [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
1824             : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
1825             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1826         );
1827 
1828         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1829 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1830 
1831         return original.as_storage;
1832     }
1833 
1834     static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1835     {
1836         storage_union original;
1837         storage_union value = { v };
1838         storage_union result;
1839         uint32_t tmp;
1840 
1841 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1842         __asm__ __volatile__\
1843         (\
1844             "1:\n\t"\
1845             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1846             "orr %x[result_0], %x[original_0], %x[value_0]\n\t"\
1847             "orr %x[result_1], %x[original_1], %x[value_1]\n\t"\
1848             "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
1849             "cbnz %w[tmp], 1b\n\t"\
1850             : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
1851               [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
1852               [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
1853             : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
1854             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1855         );
1856 
1857         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1858 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1859 
1860         return original.as_storage;
1861     }
1862 
1863     static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
1864     {
1865         storage_union original;
1866         storage_union value = { v };
1867         storage_union result;
1868         uint32_t tmp;
1869 
1870 #define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
1871         __asm__ __volatile__\
1872         (\
1873             "1:\n\t"\
1874             "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
1875             "eor %x[result_0], %x[original_0], %x[value_0]\n\t"\
1876             "eor %x[result_1], %x[original_1], %x[value_1]\n\t"\
1877             "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
1878             "cbnz %w[tmp], 1b\n\t"\
1879             : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
1880               [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
1881               [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
1882             : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
1883             : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
1884         );
1885 
1886         BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
1887 #undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
1888 
1889         return original.as_storage;
1890     }
1891 
1892     static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
1893     {
1894         return !!exchange(storage, (storage_type)1, order);
1895     }
1896 
1897     static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
1898     {
1899         store(storage, (storage_type)0, order);
1900     }
1901 };
1902 
1903 } // namespace detail
1904 } // namespace atomics
1905 } // namespace boost
1906 
1907 #include <boost/atomic/detail/footer.hpp>
1908 
1909 #endif // BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_GCC_AARCH64_HPP_INCLUDED_