/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2009 Helge Bahmann
 * Copyright (c) 2012 Tim Blechmann
 * Copyright (c) 2014 Andrey Semashev
 */
/*!
 * \file   atomic/detail/core_arch_ops_msvc_x86.hpp
 *
 * This header contains implementation of the \c core_arch_operations template.
 */

#ifndef BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_X86_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_X86_HPP_INCLUDED_

#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/intptr.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_traits.hpp>
#include <boost/atomic/detail/core_arch_operations_fwd.hpp>
#include <boost/atomic/detail/type_traits/make_signed.hpp>
#include <boost/atomic/detail/capabilities.hpp>
#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B) || defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/cas_based_exchange.hpp>
#include <boost/atomic/detail/core_ops_cas_based.hpp>
#endif
#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B) && defined(__AVX__)
#include <emmintrin.h>
#include <boost/atomic/detail/string_ops.hpp>
#endif
#include <boost/atomic/detail/ops_msvc_common.hpp>
#if !defined(_M_IX86) && !(defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8) && defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16))
#include <boost/atomic/detail/extending_cas_based_arithmetic.hpp>
#endif
#include <boost/atomic/detail/header.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

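/*
 * Implementation notes.
 *
 * x86 and x86-64 have a strong memory model: ordinary loads have acquire semantics and
 * ordinary stores have release semantics, so for every ordering except seq_cst the
 * fence_before/fence_after helpers below only have to prevent compiler reordering
 * (BOOST_ATOMIC_DETAIL_COMPILER_BARRIER is a compiler-only barrier). Sequentially
 * consistent stores are the one case that needs a StoreLoad barrier, which is obtained
 * by performing the store with a lock-prefixed read-modify-write instruction (see store()
 * in core_arch_operations_msvc_x86 below).
 *
 * The 32-bit code paths further down use MSVC inline assembly. cmpxchg8b takes its operand
 * in ECX:EBX, and MSVC can use EBX as an additional frame pointer in functions that require
 * dynamic stack alignment, so the asm blocks save and restore EBX whenever they clobber it.
 */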
struct core_arch_operations_msvc_x86_base
{
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = false;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_FORCEINLINE void fence_before(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE void fence_after_load(memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
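
        // No hardware barrier is needed after loads, even for seq_cst: seq_cst stores are
        // performed with lock-prefixed instructions (or xchg, which has an implicit lock
        // prefix), so loads are already ordered against them by the hardware.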
    }
};

template< std::size_t Size, bool Signed, bool Interprocess, typename Derived >
struct core_arch_operations_msvc_x86 :
    public core_arch_operations_msvc_x86_base
{
    typedef typename storage_traits< Size >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = Size;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = storage_traits< Size >::alignment;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;

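    // Plain stores are release-ordered on x86, so every ordering except seq_cst can use an
    // ordinary store surrounded by compiler barriers. A seq_cst store additionally needs a
    // StoreLoad barrier, which is obtained by performing the store as an atomic exchange.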
    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst)
        {
            fence_before(order);
            storage = v;
            fence_after(order);
        }
        else
        {
            Derived::exchange(storage, v, order);
        }
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type v = storage;
        fence_after_load(order);
        return v;
    }

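    // fetch_sub delegates to fetch_add with the negated value. The negation is performed on
    // the signed counterpart of storage_type and cast back, producing the two's complement
    // negation of v.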
    static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        typedef typename boost::atomics::detail::make_signed< storage_type >::type signed_storage_type;
        return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
    }

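    // On x86, compare-exchange (lock cmpxchg) cannot fail spuriously, so the weak form
    // simply forwards to the strong form.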
    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!Derived::exchange(storage, (storage_type)1, order);
    }

    static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        store(storage, (storage_type)0, order);
    }
};

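// 32-bit operations. The BOOST_ATOMIC_INTERLOCKED_* macros (see interlocked.hpp) typically
// expand to the _InterlockedExchangeAdd, _InterlockedExchange and _InterlockedCompareExchange
// intrinsics, all of which imply a full memory barrier on x86, so the memory_order argument
// can be ignored here. Illustrative use (not part of this header):
//   core_arch_operations< 4u, false, false >::fetch_add(s, 1u, memory_order_seq_cst);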
template< bool Signed, bool Interprocess >
struct core_arch_operations< 4u, Signed, Interprocess > :
    public core_arch_operations_msvc_x86< 4u, Signed, Interprocess, core_arch_operations< 4u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_x86< 4u, Signed, Interprocess, core_arch_operations< 4u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

#if defined(BOOST_ATOMIC_INTERLOCKED_AND)
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
    }
#else
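    // No interlocked AND intrinsic available: emulate fetch_and with a CAS loop.
    // compare_exchange_strong reloads the current value into res on failure, so the loop
    // retries with fresh data until the update succeeds.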
    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res & v, order, memory_order_relaxed)) {}
        return res;
    }
#endif

#if defined(BOOST_ATOMIC_INTERLOCKED_OR)
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
    }
#else
    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res | v, order, memory_order_relaxed)) {}
        return res;
    }
#endif

#if defined(BOOST_ATOMIC_INTERLOCKED_XOR)
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
    }
#else
    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type res = storage;
        while (!compare_exchange_strong(storage, res, res ^ v, order, memory_order_relaxed)) {}
        return res;
    }
#endif
};

#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8)

template< bool Signed, bool Interprocess >
struct core_arch_operations< 1u, Signed, Interprocess > :
    public core_arch_operations_msvc_x86< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_x86< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
    }
};

#elif defined(_M_IX86)

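// 8-bit operations on 32-bit x86 toolsets that lack the byte-sized Interlocked intrinsics.
// These are implemented with MSVC inline assembly: lock xadd for fetch_add, xchg for
// exchange (xchg with a memory operand is implicitly locked), lock cmpxchg for CAS, and a
// lock cmpxchg retry loop for the bitwise operations.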
template< bool Signed, bool Interprocess >
struct core_arch_operations< 1u, Signed, Interprocess > :
    public core_arch_operations_msvc_x86< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_x86< 1u, Signed, Interprocess, core_arch_operations< 1u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edx, storage
            movzx eax, v
            lock xadd byte ptr [edx], al
            mov v, al
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edx, storage
            movzx eax, v
            xchg byte ptr [edx], al
            mov v, al
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order) BOOST_NOEXCEPT
    {
        base_type::fence_before(success_order);
        bool success;
        __asm
        {
            mov esi, expected
            mov edi, storage
            movzx eax, byte ptr [esi]
            movzx edx, desired
            lock cmpxchg byte ptr [edi], dl
            mov byte ptr [esi], al
            sete success
        };

        base_type::fence_after(success_order);
        return success;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edi, storage
            movzx ecx, v
            xor edx, edx
            movzx eax, byte ptr [edi]
            align 16
        again:
            mov dl, al
            and dl, cl
            lock cmpxchg byte ptr [edi], dl
            jne again
            mov v, al
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edi, storage
            movzx ecx, v
            xor edx, edx
            movzx eax, byte ptr [edi]
            align 16
        again:
            mov dl, al
            or dl, cl
            lock cmpxchg byte ptr [edi], dl
            jne again
            mov v, al
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edi, storage
            movzx ecx, v
            xor edx, edx
            movzx eax, byte ptr [edi]
            align 16
        again:
            mov dl, al
            xor dl, cl
            lock cmpxchg byte ptr [edi], dl
            jne again
            mov v, al
        };
        base_type::fence_after(order);
        return v;
    }
};

#else

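// Neither byte-sized Interlocked intrinsics nor 32-bit inline assembly are available, so
// 8-bit atomics are built on top of the 32-bit operations: extending_cas_based_arithmetic
// (see extending_cas_based_arithmetic.hpp) emulates the narrower arithmetic with CAS loops
// on the wider storage.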
template< bool Signed, bool Interprocess >
struct core_arch_operations< 1u, Signed, Interprocess > :
    public extending_cas_based_arithmetic< core_arch_operations< 4u, Signed, Interprocess >, 1u, Signed >
{
};

#endif

#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16)

template< bool Signed, bool Interprocess >
struct core_arch_operations< 2u, Signed, Interprocess > :
    public core_arch_operations_msvc_x86< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_x86< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
    }
};

#elif defined(_M_IX86)

template< bool Signed, bool Interprocess >
struct core_arch_operations< 2u, Signed, Interprocess > :
    public core_arch_operations_msvc_x86< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_x86< 2u, Signed, Interprocess, core_arch_operations< 2u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edx, storage
            movzx eax, v
            lock xadd word ptr [edx], ax
            mov v, ax
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edx, storage
            movzx eax, v
            xchg word ptr [edx], ax
            mov v, ax
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order) BOOST_NOEXCEPT
    {
        base_type::fence_before(success_order);
        bool success;
        __asm
        {
            mov esi, expected
            mov edi, storage
            movzx eax, word ptr [esi]
            movzx edx, desired
            lock cmpxchg word ptr [edi], dx
            mov word ptr [esi], ax
            sete success
        };

        base_type::fence_after(success_order);
        return success;
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edi, storage
            movzx ecx, v
            xor edx, edx
            movzx eax, word ptr [edi]
            align 16
        again:
            mov dx, ax
            and dx, cx
            lock cmpxchg word ptr [edi], dx
            jne again
            mov v, ax
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edi, storage
            movzx ecx, v
            xor edx, edx
            movzx eax, word ptr [edi]
            align 16
        again:
            mov dx, ax
            or dx, cx
            lock cmpxchg word ptr [edi], dx
            jne again
            mov v, ax
        };
        base_type::fence_after(order);
        return v;
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        base_type::fence_before(order);
        __asm
        {
            mov edi, storage
            movzx ecx, v
            xor edx, edx
            movzx eax, word ptr [edi]
            align 16
        again:
            mov dx, ax
            xor dx, cx
            lock cmpxchg word ptr [edi], dx
            jne again
            mov v, ax
        };
        base_type::fence_after(order);
        return v;
    }
};

#else

template< bool Signed, bool Interprocess >
struct core_arch_operations< 2u, Signed, Interprocess > :
    public extending_cas_based_arithmetic< core_arch_operations< 4u, Signed, Interprocess >, 2u, Signed >
{
};

#endif


#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B)

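/*
 * 64-bit atomics on 32-bit x86, built around the cmpxchg8b instruction.
 *
 * Aligned 8-byte loads and stores are atomic on x86 (the Intel SDM guarantees atomicity of
 * quadword accesses aligned on an 8-byte boundary), so when the storage is suitably aligned
 * the code below performs plain 64-bit moves through SSE registers (movq/vmovq) or, without
 * SSE2, through the x87 FPU (fild/fistp). Misaligned accesses and seq_cst stores fall back
 * to lock cmpxchg8b, which is atomic regardless of alignment and acts as a full barrier.
 *
 * cmpxchg8b takes the expected value in EDX:EAX and the desired value in ECX:EBX. Because
 * MSVC can use EBX as a frame pointer, the asm blocks save EBX to a local and restore it
 * after the instruction.
 */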
template< bool Signed, bool Interprocess >
struct msvc_dcas_x86
{
    typedef typename storage_traits< 8u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = true;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 8u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        storage_type volatile* p = &storage;
        if (BOOST_LIKELY(order != memory_order_seq_cst && ((uintptr_t)p & 7u) == 0u))
        {
#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
#if defined(__AVX__)
            __asm
            {
                mov edx, p
                vmovq xmm4, v
                vmovq qword ptr [edx], xmm4
            };
#else
            __asm
            {
                mov edx, p
                movq xmm4, v
                movq qword ptr [edx], xmm4
            };
#endif
#else
            __asm
            {
                mov edx, p
                fild v
                fistp qword ptr [edx]
            };
#endif
        }
        else
        {
            uint32_t backup;
            __asm
            {
                mov backup, ebx
                mov edi, p
                mov ebx, dword ptr [v]
                mov ecx, dword ptr [v + 4]
                mov eax, dword ptr [edi]
                mov edx, dword ptr [edi + 4]
                align 16
            again:
                lock cmpxchg8b qword ptr [edi]
                jne again
                mov ebx, backup
            };
        }

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        storage_type const volatile* p = &storage;
        storage_type value;

        if (BOOST_LIKELY(((uintptr_t)p & 7u) == 0u))
        {
#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
#if defined(__AVX__)
            __asm
            {
                mov edx, p
                vmovq xmm4, qword ptr [edx]
                vmovq value, xmm4
            };
#else
            __asm
            {
                mov edx, p
                movq xmm4, qword ptr [edx]
                movq value, xmm4
            };
#endif
#else
            __asm
            {
                mov edx, p
                fild qword ptr [edx]
                fistp value
            };
#endif
        }
        else
        {
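            // Perform the load with cmpxchg8b. The comparands in EDX:EAX are set equal to
            // ECX:EBX, so the instruction either writes back the value that is already there
            // or simply loads the current value into EDX:EAX; either way EDX:EAX ends up
            // holding the current contents of the storage. EBX and ECX are only read here,
            // not modified, so EBX does not need to be saved.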
            __asm
            {
                mov edi, p
                mov eax, ebx
                mov edx, ecx
                lock cmpxchg8b qword ptr [edi]
                mov dword ptr [value], eax
                mov dword ptr [value + 4], edx
            };
        }

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        return value;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
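        // When the 64-bit Interlocked compare-exchange intrinsic is available it is used
        // directly; otherwise a single lock cmpxchg8b performs the comparison and exchange,
        // with EBX saved and restored around the asm block.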
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        storage_type volatile* p = &storage;
#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
        const storage_type old_val = (storage_type)BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(p, desired, expected);
        const bool result = (old_val == expected);
        expected = old_val;
#else
        bool result;
        uint32_t backup;
        __asm
        {
            mov backup, ebx
            mov edi, p
            mov esi, expected
            mov ebx, dword ptr [desired]
            mov ecx, dword ptr [desired + 4]
            mov eax, dword ptr [esi]
            mov edx, dword ptr [esi + 4]
            lock cmpxchg8b qword ptr [edi]
            mov dword ptr [esi], eax
            mov dword ptr [esi + 4], edx
            mov ebx, backup
            sete result
        };
#endif
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        return result;
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        storage_type volatile* p = &storage;
        uint32_t backup;
        __asm
        {
            mov backup, ebx
            mov edi, p
            mov ebx, dword ptr [v]
            mov ecx, dword ptr [v + 4]
            mov eax, dword ptr [edi]
            mov edx, dword ptr [edi + 4]
            align 16
        again:
            lock cmpxchg8b qword ptr [edi]
            jne again
            mov ebx, backup
            mov dword ptr [v], eax
            mov dword ptr [v + 4], edx
        };

        BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();

        return v;
    }
};

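// core_operations_cas_based (see core_ops_cas_based.hpp) builds the remaining operations
// (fetch_add, fetch_sub, the bitwise ops, test_and_set, clear) out of compare-exchange
// loops on top of the primitives provided by msvc_dcas_x86.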
template< bool Signed, bool Interprocess >
struct core_arch_operations< 8u, Signed, Interprocess > :
    public core_operations_cas_based< msvc_dcas_x86< Signed, Interprocess > >
{
};

#elif defined(_M_AMD64)

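// 64-bit operations on x86-64. The BOOST_ATOMIC_INTERLOCKED_*64 macros typically map to the
// 64-bit Interlocked intrinsics (_InterlockedExchangeAdd64, _InterlockedExchange64,
// _InterlockedCompareExchange64, etc.), which imply full barriers, so the memory_order
// argument is ignored.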
template< bool Signed, bool Interprocess >
struct core_arch_operations< 8u, Signed, Interprocess > :
    public core_arch_operations_msvc_x86< 8u, Signed, Interprocess, core_arch_operations< 8u, Signed, Interprocess > >
{
    typedef core_arch_operations_msvc_x86< 8u, Signed, Interprocess, core_arch_operations< 8u, Signed, Interprocess > > base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        storage_type previous = expected;
        storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
        expected = old_val;
        return (previous == old_val);
    }

    static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
    }

    static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
    }
};

#endif

#if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)

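/*
 * 128-bit atomics on x86-64, built around cmpxchg16b via the
 * BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128 macro (_InterlockedCompareExchange128).
 * Every operation reduces to that compare-exchange; cas_based_exchange then supplies
 * exchange() and core_operations_cas_based supplies the remaining operations on top of it.
 *
 * When the code is compiled for AVX, aligned 16-byte vector loads and stores are used as a
 * fast path; this relies on aligned 16-byte SSE/AVX memory accesses being atomic on
 * AVX-capable CPUs. Misaligned storage and seq_cst stores still go through the cmpxchg16b
 * path.
 */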
template< bool Signed, bool Interprocess >
struct msvc_dcas_x86_64
{
    typedef typename storage_traits< 16u >::type storage_type;

    static BOOST_CONSTEXPR_OR_CONST bool is_interprocess = Interprocess;
    static BOOST_CONSTEXPR_OR_CONST bool full_cas_based = true;
    static BOOST_CONSTEXPR_OR_CONST bool is_always_lock_free = true;

    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 16u;
    static BOOST_CONSTEXPR_OR_CONST std::size_t storage_alignment = 16u;
    static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;

    static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
#if defined(__AVX__)
        if (BOOST_LIKELY(order != memory_order_seq_cst && (((uintptr_t)&storage) & 15u) == 0u))
        {
            BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
            __m128i value;
            BOOST_ATOMIC_DETAIL_MEMCPY(&value, &v, sizeof(value));
            _mm_store_si128(const_cast< __m128i* >(reinterpret_cast< volatile __m128i* >(&storage)), value);
            BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
            return;
        }
#endif

        storage_type value = const_cast< storage_type& >(storage);
        while (!BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, v, &value)) {}
    }

    static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type value;
#if defined(__AVX__)
        if (BOOST_LIKELY((((uintptr_t)&storage) & 15u) == 0u))
        {
            __m128i v = _mm_load_si128(const_cast< const __m128i* >(reinterpret_cast< const volatile __m128i* >(&storage)));
            BOOST_ATOMIC_DETAIL_MEMCPY(&value, &v, sizeof(value));
            BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
            return value;
        }
#endif

        value = storage_type();
        BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, value, &value);
        return value;
    }

    static BOOST_FORCEINLINE bool compare_exchange_strong(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
    {
        return !!BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, desired, &expected);
    }

    static BOOST_FORCEINLINE bool compare_exchange_weak(
        storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
    {
        return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
    }
};

template< bool Signed, bool Interprocess >
struct core_arch_operations< 16u, Signed, Interprocess > :
    public core_operations_cas_based< cas_based_exchange< msvc_dcas_x86_64< Signed, Interprocess > > >
{
};

#endif

} // namespace detail
} // namespace atomics
} // namespace boost

#include <boost/atomic/detail/footer.hpp>

#endif // BOOST_ATOMIC_DETAIL_CORE_ARCH_OPS_MSVC_X86_HPP_INCLUDED_