/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2020 Andrey Semashev
 */
/*!
 * \file   atomic/detail/extra_ops_gcc_aarch64.hpp
 *
 * This header contains implementation of the extra atomic operations for AArch64.
 */

#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_

#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/platform.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/detail/extra_ops_generic.hpp>
#include <boost/atomic/detail/ops_gcc_aarch64_common.hpp>
#include <boost/atomic/detail/capabilities.hpp>
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

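// Wraps a size-specific implementation (Base) and implements the opaque_* and *_and_test
// extra operations in terms of the value-returning operations provided by Base.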
template< typename Base >
struct extra_operations_gcc_aarch64_common :
    public Base
{
    typedef Base base_type;
    typedef typename base_type::storage_type storage_type;

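    // The opaque_* operations discard the result and simply forward to the corresponding
    // value-returning operations of the base class.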
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        base_type::negate(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::add(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::sub(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_and(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_or(storage, v, order);
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::bitwise_xor(storage, v, order);
    }

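    // The *_and_test operations return whether the resulting value is non-zero.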
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::negate(storage, order);
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::add(storage, v, order);
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::sub(storage, v, order);
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_and(storage, v, order);
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_or(storage, v, order);
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_xor(storage, v, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!base_type::bitwise_complement(storage, order);
    }
};

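// The size-specific implementations below use load-exclusive/store-exclusive (LL/SC) retry
// loops. BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN is defined locally to emit one such loop for a
// given pair of load/store memory order suffixes, and BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH
// (from ops_gcc_aarch64_common.hpp) invokes it with the suffixes that match the requested
// memory_order (e.g. "a" for acquire loads, "l" for release stores).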
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations_gcc_aarch64;

template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 1u, Signed > :
    public extra_operations_generic< Base, 1u, Signed >
{
    typedef extra_operations_generic< Base, 1u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

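    // fetch_negate returns the original value; negate (below) returns the negated result.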
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)

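    // The operations below are only defined when LSE atomics are unavailable; with LSE the
    // implementations inherited from extra_operations_generic (built on the core fetch
    // operations) are used instead. Operand constraints: "Ir" accepts an add/sub immediate
    // or a register, "Kr"/"Lr" accept a 32-bit/64-bit logical immediate or a register.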
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#endif
};

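// Selects the 8-bit AArch64 implementation, wrapped with the common opaque/test operations.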
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 1u, Signed > >
{
};


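// The 16-bit and 32-bit specializations repeat the same pattern with halfword (ldxrh/stxrh)
// and word (ldxr/stxr) exclusive accesses.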
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 2u, Signed > :
    public extra_operations_generic< Base, 2u, Signed >
{
    typedef extra_operations_generic< Base, 2u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#endif
};

template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 2u, Signed > >
{
};


template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 4u, Signed > :
    public extra_operations_generic< Base, 4u, Signed >
{
    typedef extra_operations_generic< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#endif
};

template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 4u, Signed > >
{
};


template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 8u, Signed > :
    public extra_operations_generic< Base, 8u, Signed >
{
    typedef extra_operations_generic< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

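    // The 64-bit variant uses the %x (full 64-bit register) operand modifier for data
    // registers instead of %w.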
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "neg %x[result], %x[original]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "neg %x[result], %x[result]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "add %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "sub %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "and %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "orr %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "eor %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type original, result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "mvn %x[result], %x[original]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "mvn %x[result], %x[result]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result;
    }

#endif
};

template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 8u, Signed > >
{
};


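// The 128-bit specialization operates on the value as a pair of 64-bit halves using
// load/store-exclusive pair instructions (ldxp/stxp). The BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO
// and BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI macros select the storage_union element that holds
// the low/high half of the value, accounting for the target endianness.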
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 16u, Signed > :
    public extra_operations_generic< Base, 16u, Signed >
{
    typedef extra_operations_generic< Base, 16u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename base_type::storage_union storage_union;

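    // 128-bit negation is computed as a two's complement: both halves are inverted (mvn) and
    // then 1 is added with carry propagation (adds/adc), which is why these two operations
    // also clobber the condition flags.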
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[original_0]\n\t"\
            "mvn %x[result_1], %x[original_1]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], #1\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], xzr\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[result_0]\n\t"\
            "mvn %x[result_1], %x[result_1]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], #1\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], xzr\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

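    // 128-bit addition: adds/adc propagate the carry from the low half into the high half.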
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

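    // 128-bit subtraction: subs/sbc propagate the borrow from the low half into the high half.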
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "subs %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "sbc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

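    // The bitwise operations act on the two 64-bit halves independently, so the condition
    // flags are not clobbered.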
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "and %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "and %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "orr %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "orr %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        storage_union value = { v };
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "eor %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "eor %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union original;
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[original_0]\n\t"\
            "mvn %x[result_1], %x[original_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return original.as_storage;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_union result;
        uint32_t tmp;

#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[result_0]\n\t"\
            "mvn %x[result_1], %x[result_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : "memory"\
        );

        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN

        return result.as_storage;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 16u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 16u, Signed > >
{
};

} // namespace detail
} // namespace atomics
} // namespace boost

#include <boost/atomic/detail/footer.hpp>

#endif