/* Fast open-addressing hash table.
 *
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * https://www.boost.org/LICENSE_1_0.txt)
 */

0013 #ifndef BOOST_UNORDERED_DETAIL_FOA_CORE_HPP
0014 #define BOOST_UNORDERED_DETAIL_FOA_CORE_HPP
0015
0016 #include <boost/assert.hpp>
0017 #include <boost/config.hpp>
0018 #include <boost/config/workaround.hpp>
0019 #include <boost/core/allocator_traits.hpp>
0020 #include <boost/core/bit.hpp>
0021 #include <boost/core/empty_value.hpp>
0022 #include <boost/core/no_exceptions_support.hpp>
0023 #include <boost/core/pointer_traits.hpp>
0024 #include <boost/cstdint.hpp>
0025 #include <boost/predef.h>
0026 #include <boost/unordered/detail/allocator_constructed.hpp>
0027 #include <boost/unordered/detail/narrow_cast.hpp>
0028 #include <boost/unordered/detail/mulx.hpp>
0029 #include <boost/unordered/detail/static_assert.hpp>
0030 #include <boost/unordered/detail/type_traits.hpp>
0031 #include <boost/unordered/hash_traits.hpp>
0032 #include <climits>
0033 #include <cmath>
0034 #include <cstddef>
0035 #include <cstring>
0036 #include <limits>
0037 #include <memory>
0038 #include <new>
0039 #include <tuple>
0040 #include <type_traits>
0041 #include <utility>
0042
0043 #if defined(BOOST_UNORDERED_ENABLE_STATS)
0044 #include <boost/unordered/detail/foa/cumulative_stats.hpp>
0045 #endif
0046
0047 #if !defined(BOOST_UNORDERED_DISABLE_SSE2)
0048 #if defined(BOOST_UNORDERED_ENABLE_SSE2)|| \
0049 defined(__SSE2__)|| \
0050 defined(_M_X64)||(defined(_M_IX86_FP)&&_M_IX86_FP>=2)
0051 #define BOOST_UNORDERED_SSE2
0052 #endif
0053 #endif
0054
0055 #if !defined(BOOST_UNORDERED_DISABLE_NEON)
0056 #if defined(BOOST_UNORDERED_ENABLE_NEON)||\
0057 (defined(__ARM_NEON)&&!defined(__ARM_BIG_ENDIAN))
0058 #define BOOST_UNORDERED_LITTLE_ENDIAN_NEON
0059 #endif
0060 #endif
0061
0062 #if defined(BOOST_UNORDERED_SSE2)
0063 #include <emmintrin.h>
0064 #elif defined(BOOST_UNORDERED_LITTLE_ENDIAN_NEON)
0065 #include <arm_neon.h>
0066 #endif
0067
0068 #ifdef __has_builtin
0069 #define BOOST_UNORDERED_HAS_BUILTIN(x) __has_builtin(x)
0070 #else
0071 #define BOOST_UNORDERED_HAS_BUILTIN(x) 0
0072 #endif
0073
0074 #if !defined(NDEBUG)
0075 #define BOOST_UNORDERED_ASSUME(cond) BOOST_ASSERT(cond)
0076 #elif BOOST_UNORDERED_HAS_BUILTIN(__builtin_assume)
0077 #define BOOST_UNORDERED_ASSUME(cond) __builtin_assume(cond)
0078 #elif defined(__GNUC__) || BOOST_UNORDERED_HAS_BUILTIN(__builtin_unreachable)
0079 #define BOOST_UNORDERED_ASSUME(cond) \
0080 do{ \
0081 if(!(cond))__builtin_unreachable(); \
0082 }while(0)
0083 #elif defined(_MSC_VER)
0084 #define BOOST_UNORDERED_ASSUME(cond) __assume(cond)
0085 #else
0086 #define BOOST_UNORDERED_ASSUME(cond) \
0087 do{ \
0088 static_cast<void>(false&&(cond)); \
0089 }while(0)
0090 #endif
0091
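/* BOOST_UNORDERED_PREFETCH(p) hints the CPU to bring *p into cache ahead of
 * use: __builtin_prefetch on GCC/Clang, _mm_prefetch on SSE2 targets, and a
 * no-op otherwise.
 */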
0096 #if defined(BOOST_GCC)||defined(BOOST_CLANG)
0097 #define BOOST_UNORDERED_PREFETCH(p) __builtin_prefetch((const char*)(p))
0098 #elif defined(BOOST_UNORDERED_SSE2)
0099 #define BOOST_UNORDERED_PREFETCH(p) _mm_prefetch((const char*)(p),_MM_HINT_T0)
0100 #else
0101 #define BOOST_UNORDERED_PREFETCH(p) ((void)(p))
0102 #endif
0103
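/* BOOST_UNORDERED_PREFETCH_ELEMENTS(p,N) prefetches the initial portion of an
 * array of N elements starting at p. On ARM it walks the first half of the
 * range one (assumed 64-byte) cache line at a time, which is intended to help
 * the element-comparison loop in find(); elsewhere it degrades to a single
 * BOOST_UNORDERED_PREFETCH of the first element.
 */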
0111 #if BOOST_ARCH_ARM
0116 #define BOOST_UNORDERED_PREFETCH_ELEMENTS(p,N) \
0117 do{ \
0118 auto BOOST_UNORDERED_P=(p); \
0119 constexpr int cache_line=64; \
0120 const char *p0=reinterpret_cast<const char*>(BOOST_UNORDERED_P), \
0121 *p1=p0+sizeof(*BOOST_UNORDERED_P)*(N)/2; \
0122 for(;p0<p1;p0+=cache_line)BOOST_UNORDERED_PREFETCH(p0); \
0123 }while(0)
0124 #else
0125 #define BOOST_UNORDERED_PREFETCH_ELEMENTS(p,N) BOOST_UNORDERED_PREFETCH(p)
0126 #endif
0127
0128 #ifdef __has_feature
0129 #define BOOST_UNORDERED_HAS_FEATURE(x) __has_feature(x)
0130 #else
0131 #define BOOST_UNORDERED_HAS_FEATURE(x) 0
0132 #endif
0133
0134 #if BOOST_UNORDERED_HAS_FEATURE(thread_sanitizer)|| \
0135 defined(__SANITIZE_THREAD__)
0136 #define BOOST_UNORDERED_THREAD_SANITIZER
0137 #endif
0138
0139 #define BOOST_UNORDERED_STATIC_ASSERT_HASH_PRED(Hash, Pred) \
0140 static_assert(boost::unordered::detail::is_nothrow_swappable<Hash>::value, \
0141 "Template parameter Hash is required to be nothrow Swappable."); \
0142 static_assert(boost::unordered::detail::is_nothrow_swappable<Pred>::value, \
0143 "Template parameter Pred is required to be nothrow Swappable");
0144
0145 namespace boost{
0146 namespace unordered{
0147 namespace detail{
0148 namespace foa{
0149
0150 static constexpr std::size_t default_bucket_count=0;
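/* group15<IntegralWrapper> is the metadata block associated with a group of
 * N=15 element slots. Each slot has one metadata byte holding a reduced hash
 * value, with 0 reserved for "available" and 1 for the sentinel that marks
 * the end of the table (reduced hashes that would collide with these values
 * are remapped, see reduced_hash/match_word). The 16th byte is an overflow
 * bitmask: bit hash%8 is set when an insertion probing through this group had
 * to continue to a later group, which lets unsuccessful lookups stop early
 * (is_not_overflowed). The match*() operations return a 15-bit mask of
 * matching slots. Three interchangeable implementations follow: SSE2,
 * little-endian NEON, and a portable one that keeps the metadata bit-sliced
 * across two 64-bit words.
 */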
0216 #if defined(BOOST_UNORDERED_SSE2)
0217
0218 template<template<typename> class IntegralWrapper>
0219 struct group15
0220 {
0221 static constexpr std::size_t N=15;
0222 static constexpr bool regular_layout=true;
0223
0224 struct dummy_group_type
0225 {
0226 alignas(16) unsigned char storage[N+1]={0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0};
0227 };
0228
0229 inline void initialize()
0230 {
0231 _mm_store_si128(
0232 reinterpret_cast<__m128i*>(m),_mm_setzero_si128());
0233 }
0234
0235 inline void set(std::size_t pos,std::size_t hash)
0236 {
0237 BOOST_ASSERT(pos<N);
0238 at(pos)=reduced_hash(hash);
0239 }
0240
0241 inline void set_sentinel()
0242 {
0243 at(N-1)=sentinel_;
0244 }
0245
0246 inline bool is_sentinel(std::size_t pos)const
0247 {
0248 BOOST_ASSERT(pos<N);
0249 return at(pos)==sentinel_;
0250 }
0251
0252 static inline bool is_sentinel(unsigned char* pc)noexcept
0253 {
0254 return *pc==sentinel_;
0255 }
0256
0257 inline void reset(std::size_t pos)
0258 {
0259 BOOST_ASSERT(pos<N);
0260 at(pos)=available_;
0261 }
0262
0263 static inline void reset(unsigned char* pc)
0264 {
0265 *reinterpret_cast<slot_type*>(pc)=available_;
0266 }
0267
0268 inline int match(std::size_t hash)const
0269 {
0270 return _mm_movemask_epi8(
0271 _mm_cmpeq_epi8(load_metadata(),_mm_set1_epi32(match_word(hash))))&0x7FFF;
0272 }
0273
0274 inline bool is_not_overflowed(std::size_t hash)const
0275 {
0276 static constexpr unsigned char shift[]={1,2,4,8,16,32,64,128};
0277
0278 return !(overflow()&shift[hash%8]);
0279 }
0280
0281 inline void mark_overflow(std::size_t hash)
0282 {
0283 overflow()|=static_cast<unsigned char>(1<<(hash%8));
0284 }
0285
0286 static inline bool maybe_caused_overflow(unsigned char* pc)
0287 {
0288 std::size_t pos=reinterpret_cast<uintptr_t>(pc)%sizeof(group15);
0289 group15 *pg=reinterpret_cast<group15*>(pc-pos);
0290 return !pg->is_not_overflowed(*pc);
0291 }
0292
0293 inline int match_available()const
0294 {
0295 return _mm_movemask_epi8(
0296 _mm_cmpeq_epi8(load_metadata(),_mm_setzero_si128()))&0x7FFF;
0297 }
0298
0299 inline bool is_occupied(std::size_t pos)const
0300 {
0301 BOOST_ASSERT(pos<N);
0302 return at(pos)!=available_;
0303 }
0304
0305 static inline bool is_occupied(unsigned char* pc)noexcept
0306 {
0307 return *reinterpret_cast<slot_type*>(pc)!=available_;
0308 }
0309
0310 inline int match_occupied()const
0311 {
0312 return (~match_available())&0x7FFF;
0313 }
0314
0315 private:
0316 using slot_type=IntegralWrapper<unsigned char>;
0317 BOOST_UNORDERED_STATIC_ASSERT(sizeof(slot_type)==1);
0318
0319 static constexpr unsigned char available_=0,
0320 sentinel_=1;
0321
0322 inline __m128i load_metadata()const
0323 {
0324 #if defined(BOOST_UNORDERED_THREAD_SANITIZER)
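/* ThreadSanitizer build: load the metadata one byte at a time instead of with
 * a single 16-byte load, so that the byte-level accesses performed by the
 * concurrent containers are not flagged as racing against a wide read.
 */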
0329 return _mm_set_epi8(
0330 (char)m[15],(char)m[14],(char)m[13],(char)m[12],
0331 (char)m[11],(char)m[10],(char)m[ 9],(char)m[ 8],
0332 (char)m[ 7],(char)m[ 6],(char)m[ 5],(char)m[ 4],
0333 (char)m[ 3],(char)m[ 2],(char)m[ 1],(char)m[ 0]);
0334 #else
0335 return _mm_load_si128(reinterpret_cast<const __m128i*>(m));
0336 #endif
0337 }
0338
0339 inline static int match_word(std::size_t hash)
0340 {
0341 static constexpr boost::uint32_t word[]=
0342 {
0343 0x08080808u,0x09090909u,0x02020202u,0x03030303u,0x04040404u,0x05050505u,
0344 0x06060606u,0x07070707u,0x08080808u,0x09090909u,0x0A0A0A0Au,0x0B0B0B0Bu,
0345 0x0C0C0C0Cu,0x0D0D0D0Du,0x0E0E0E0Eu,0x0F0F0F0Fu,0x10101010u,0x11111111u,
0346 0x12121212u,0x13131313u,0x14141414u,0x15151515u,0x16161616u,0x17171717u,
0347 0x18181818u,0x19191919u,0x1A1A1A1Au,0x1B1B1B1Bu,0x1C1C1C1Cu,0x1D1D1D1Du,
0348 0x1E1E1E1Eu,0x1F1F1F1Fu,0x20202020u,0x21212121u,0x22222222u,0x23232323u,
0349 0x24242424u,0x25252525u,0x26262626u,0x27272727u,0x28282828u,0x29292929u,
0350 0x2A2A2A2Au,0x2B2B2B2Bu,0x2C2C2C2Cu,0x2D2D2D2Du,0x2E2E2E2Eu,0x2F2F2F2Fu,
0351 0x30303030u,0x31313131u,0x32323232u,0x33333333u,0x34343434u,0x35353535u,
0352 0x36363636u,0x37373737u,0x38383838u,0x39393939u,0x3A3A3A3Au,0x3B3B3B3Bu,
0353 0x3C3C3C3Cu,0x3D3D3D3Du,0x3E3E3E3Eu,0x3F3F3F3Fu,0x40404040u,0x41414141u,
0354 0x42424242u,0x43434343u,0x44444444u,0x45454545u,0x46464646u,0x47474747u,
0355 0x48484848u,0x49494949u,0x4A4A4A4Au,0x4B4B4B4Bu,0x4C4C4C4Cu,0x4D4D4D4Du,
0356 0x4E4E4E4Eu,0x4F4F4F4Fu,0x50505050u,0x51515151u,0x52525252u,0x53535353u,
0357 0x54545454u,0x55555555u,0x56565656u,0x57575757u,0x58585858u,0x59595959u,
0358 0x5A5A5A5Au,0x5B5B5B5Bu,0x5C5C5C5Cu,0x5D5D5D5Du,0x5E5E5E5Eu,0x5F5F5F5Fu,
0359 0x60606060u,0x61616161u,0x62626262u,0x63636363u,0x64646464u,0x65656565u,
0360 0x66666666u,0x67676767u,0x68686868u,0x69696969u,0x6A6A6A6Au,0x6B6B6B6Bu,
0361 0x6C6C6C6Cu,0x6D6D6D6Du,0x6E6E6E6Eu,0x6F6F6F6Fu,0x70707070u,0x71717171u,
0362 0x72727272u,0x73737373u,0x74747474u,0x75757575u,0x76767676u,0x77777777u,
0363 0x78787878u,0x79797979u,0x7A7A7A7Au,0x7B7B7B7Bu,0x7C7C7C7Cu,0x7D7D7D7Du,
0364 0x7E7E7E7Eu,0x7F7F7F7Fu,0x80808080u,0x81818181u,0x82828282u,0x83838383u,
0365 0x84848484u,0x85858585u,0x86868686u,0x87878787u,0x88888888u,0x89898989u,
0366 0x8A8A8A8Au,0x8B8B8B8Bu,0x8C8C8C8Cu,0x8D8D8D8Du,0x8E8E8E8Eu,0x8F8F8F8Fu,
0367 0x90909090u,0x91919191u,0x92929292u,0x93939393u,0x94949494u,0x95959595u,
0368 0x96969696u,0x97979797u,0x98989898u,0x99999999u,0x9A9A9A9Au,0x9B9B9B9Bu,
0369 0x9C9C9C9Cu,0x9D9D9D9Du,0x9E9E9E9Eu,0x9F9F9F9Fu,0xA0A0A0A0u,0xA1A1A1A1u,
0370 0xA2A2A2A2u,0xA3A3A3A3u,0xA4A4A4A4u,0xA5A5A5A5u,0xA6A6A6A6u,0xA7A7A7A7u,
0371 0xA8A8A8A8u,0xA9A9A9A9u,0xAAAAAAAAu,0xABABABABu,0xACACACACu,0xADADADADu,
0372 0xAEAEAEAEu,0xAFAFAFAFu,0xB0B0B0B0u,0xB1B1B1B1u,0xB2B2B2B2u,0xB3B3B3B3u,
0373 0xB4B4B4B4u,0xB5B5B5B5u,0xB6B6B6B6u,0xB7B7B7B7u,0xB8B8B8B8u,0xB9B9B9B9u,
0374 0xBABABABAu,0xBBBBBBBBu,0xBCBCBCBCu,0xBDBDBDBDu,0xBEBEBEBEu,0xBFBFBFBFu,
0375 0xC0C0C0C0u,0xC1C1C1C1u,0xC2C2C2C2u,0xC3C3C3C3u,0xC4C4C4C4u,0xC5C5C5C5u,
0376 0xC6C6C6C6u,0xC7C7C7C7u,0xC8C8C8C8u,0xC9C9C9C9u,0xCACACACAu,0xCBCBCBCBu,
0377 0xCCCCCCCCu,0xCDCDCDCDu,0xCECECECEu,0xCFCFCFCFu,0xD0D0D0D0u,0xD1D1D1D1u,
0378 0xD2D2D2D2u,0xD3D3D3D3u,0xD4D4D4D4u,0xD5D5D5D5u,0xD6D6D6D6u,0xD7D7D7D7u,
0379 0xD8D8D8D8u,0xD9D9D9D9u,0xDADADADAu,0xDBDBDBDBu,0xDCDCDCDCu,0xDDDDDDDDu,
0380 0xDEDEDEDEu,0xDFDFDFDFu,0xE0E0E0E0u,0xE1E1E1E1u,0xE2E2E2E2u,0xE3E3E3E3u,
0381 0xE4E4E4E4u,0xE5E5E5E5u,0xE6E6E6E6u,0xE7E7E7E7u,0xE8E8E8E8u,0xE9E9E9E9u,
0382 0xEAEAEAEAu,0xEBEBEBEBu,0xECECECECu,0xEDEDEDEDu,0xEEEEEEEEu,0xEFEFEFEFu,
0383 0xF0F0F0F0u,0xF1F1F1F1u,0xF2F2F2F2u,0xF3F3F3F3u,0xF4F4F4F4u,0xF5F5F5F5u,
0384 0xF6F6F6F6u,0xF7F7F7F7u,0xF8F8F8F8u,0xF9F9F9F9u,0xFAFAFAFAu,0xFBFBFBFBu,
0385 0xFCFCFCFCu,0xFDFDFDFDu,0xFEFEFEFEu,0xFFFFFFFFu,
0386 };
0387
0388 return (int)word[narrow_cast<unsigned char>(hash)];
0389 }
0390
0391 inline static unsigned char reduced_hash(std::size_t hash)
0392 {
0393 return narrow_cast<unsigned char>(match_word(hash));
0394 }
0395
0396 inline slot_type& at(std::size_t pos)
0397 {
0398 return m[pos];
0399 }
0400
0401 inline const slot_type& at(std::size_t pos)const
0402 {
0403 return m[pos];
0404 }
0405
0406 inline slot_type& overflow()
0407 {
0408 return at(N);
0409 }
0410
0411 inline const slot_type& overflow()const
0412 {
0413 return at(N);
0414 }
0415
0416 alignas(16) slot_type m[16];
0417 };
0418
0419 #elif defined(BOOST_UNORDERED_LITTLE_ENDIAN_NEON)
0420
0421 template<template<typename> class IntegralWrapper>
0422 struct group15
0423 {
0424 static constexpr std::size_t N=15;
0425 static constexpr bool regular_layout=true;
0426
0427 struct dummy_group_type
0428 {
0429 alignas(16) unsigned char storage[N+1]={0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0};
0430 };
0431
0432 inline void initialize()
0433 {
0434 vst1q_u8(reinterpret_cast<uint8_t*>(m),vdupq_n_u8(0));
0435 }
0436
0437 inline void set(std::size_t pos,std::size_t hash)
0438 {
0439 BOOST_ASSERT(pos<N);
0440 at(pos)=reduced_hash(hash);
0441 }
0442
0443 inline void set_sentinel()
0444 {
0445 at(N-1)=sentinel_;
0446 }
0447
0448 inline bool is_sentinel(std::size_t pos)const
0449 {
0450 BOOST_ASSERT(pos<N);
0451 return pos==N-1&&at(N-1)==sentinel_;
0452 }
0453
0454 static inline bool is_sentinel(unsigned char* pc)noexcept
0455 {
0456 return *reinterpret_cast<slot_type*>(pc)==sentinel_;
0457 }
0458
0459 inline void reset(std::size_t pos)
0460 {
0461 BOOST_ASSERT(pos<N);
0462 at(pos)=available_;
0463 }
0464
0465 static inline void reset(unsigned char* pc)
0466 {
0467 *reinterpret_cast<slot_type*>(pc)=available_;
0468 }
0469
0470 inline int match(std::size_t hash)const
0471 {
0472 return simde_mm_movemask_epi8(vceqq_u8(
0473 load_metadata(),vdupq_n_u8(reduced_hash(hash))))&0x7FFF;
0474 }
0475
0476 inline bool is_not_overflowed(std::size_t hash)const
0477 {
0478 static constexpr unsigned char shift[]={1,2,4,8,16,32,64,128};
0479
0480 return !(overflow()&shift[hash%8]);
0481 }
0482
0483 inline void mark_overflow(std::size_t hash)
0484 {
0485 overflow()|=static_cast<unsigned char>(1<<(hash%8));
0486 }
0487
0488 static inline bool maybe_caused_overflow(unsigned char* pc)
0489 {
0490 std::size_t pos=reinterpret_cast<uintptr_t>(pc)%sizeof(group15);
0491 group15 *pg=reinterpret_cast<group15*>(pc-pos);
0492 return !pg->is_not_overflowed(*pc);
0493 };
0494
0495 inline int match_available()const
0496 {
0497 return simde_mm_movemask_epi8(vceqq_u8(
0498 load_metadata(),vdupq_n_u8(0)))&0x7FFF;
0499 }
0500
0501 inline bool is_occupied(std::size_t pos)const
0502 {
0503 BOOST_ASSERT(pos<N);
0504 return at(pos)!=available_;
0505 }
0506
0507 static inline bool is_occupied(unsigned char* pc)noexcept
0508 {
0509 return *reinterpret_cast<slot_type*>(pc)!=available_;
0510 }
0511
0512 inline int match_occupied()const
0513 {
0514 return simde_mm_movemask_epi8(vcgtq_u8(
0515 load_metadata(),vdupq_n_u8(0)))&0x7FFF;
0516 }
0517
0518 private:
0519 using slot_type=IntegralWrapper<unsigned char>;
0520 BOOST_UNORDERED_STATIC_ASSERT(sizeof(slot_type)==1);
0521
0522 static constexpr unsigned char available_=0,
0523 sentinel_=1;
0524
0525 inline uint8x16_t load_metadata()const
0526 {
0527 #if defined(BOOST_UNORDERED_THREAD_SANITIZER)
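/* as in the SSE2 version: byte-wise load to keep ThreadSanitizer quiet */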
0532 alignas(16) uint8_t data[16]={
0533 m[ 0],m[ 1],m[ 2],m[ 3],m[ 4],m[ 5],m[ 6],m[ 7],
0534 m[ 8],m[ 9],m[10],m[11],m[12],m[13],m[14],m[15]};
0535 return vld1q_u8(data);
0536 #else
0537 return vld1q_u8(reinterpret_cast<const uint8_t*>(m));
0538 #endif
0539 }
0540
0541 inline static unsigned char reduced_hash(std::size_t hash)
0542 {
0543 static constexpr unsigned char table[]={
0544 8,9,2,3,4,5,6,7,8,9,10,11,12,13,14,15,
0545 16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,
0546 32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,
0547 48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,
0548 64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,
0549 80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,
0550 96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,
0551 112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,
0552 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,
0553 144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,
0554 160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,
0555 176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,
0556 192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,
0557 208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,
0558 224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,
0559 240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,
0560 };
0561
0562 return table[(unsigned char)hash];
0563 }
0564
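/* NEON has no direct equivalent of _mm_movemask_epi8; this helper (modelled
 * after SIMDe's simde_mm_movemask_epi8) rebuilds the 16-bit mask by ANDing
 * each lane with its bit weight and horizontally summing the zip-interleaved
 * halves.
 */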
0570 static inline int simde_mm_movemask_epi8(uint8x16_t a)
0571 {
0572 static constexpr uint8_t md[16]={
0573 1 << 0, 1 << 1, 1 << 2, 1 << 3,
0574 1 << 4, 1 << 5, 1 << 6, 1 << 7,
0575 1 << 0, 1 << 1, 1 << 2, 1 << 3,
0576 1 << 4, 1 << 5, 1 << 6, 1 << 7,
0577 };
0578
0579 uint8x16_t masked=vandq_u8(vld1q_u8(md),a);
0580 uint8x8x2_t tmp=vzip_u8(vget_low_u8(masked),vget_high_u8(masked));
0581 uint16x8_t x=vreinterpretq_u16_u8(vcombine_u8(tmp.val[0],tmp.val[1]));
0582
0583 #if defined(__ARM_ARCH_ISA_A64)
0584 return vaddvq_u16(x);
0585 #else
0586 uint64x2_t t64=vpaddlq_u32(vpaddlq_u16(x));
0587 return int(vgetq_lane_u64(t64,0))+int(vgetq_lane_u64(t64,1));
0588 #endif
0589 }
0590
0591 inline slot_type& at(std::size_t pos)
0592 {
0593 return m[pos];
0594 }
0595
0596 inline const slot_type& at(std::size_t pos)const
0597 {
0598 return m[pos];
0599 }
0600
0601 inline slot_type& overflow()
0602 {
0603 return at(N);
0604 }
0605
0606 inline const slot_type& overflow()const
0607 {
0608 return at(N);
0609 }
0610
0611 alignas(16) slot_type m[16];
0612 };
0613
0614 #else
0615
0616 template<template<typename> class IntegralWrapper>
0617 struct group15
0618 {
0619 static constexpr std::size_t N=15;
0620 static constexpr bool regular_layout=false;
0621
0622 struct dummy_group_type
0623 {
0624 alignas(16) boost::uint64_t m[2]=
0625 {0x0000000000004000ull,0x0000000000000000ull};
0626 };
0627
0628 inline void initialize(){m[0]=0;m[1]=0;}
0629
0630 inline void set(std::size_t pos,std::size_t hash)
0631 {
0632 BOOST_ASSERT(pos<N);
0633 set_impl(pos,reduced_hash(hash));
0634 }
0635
0636 inline void set_sentinel()
0637 {
0638 set_impl(N-1,sentinel_);
0639 }
0640
0641 inline bool is_sentinel(std::size_t pos)const
0642 {
0643 BOOST_ASSERT(pos<N);
0644 return
0645 pos==N-1&&
0646 (m[0] & boost::uint64_t(0x4000400040004000ull))==
0647 boost::uint64_t(0x4000ull)&&
0648 (m[1] & boost::uint64_t(0x4000400040004000ull))==0;
0649 }
0650
0651 inline void reset(std::size_t pos)
0652 {
0653 BOOST_ASSERT(pos<N);
0654 set_impl(pos,available_);
0655 }
0656
0657 static inline void reset(unsigned char* pc)
0658 {
0659 std::size_t pos=reinterpret_cast<uintptr_t>(pc)%sizeof(group15);
0660 pc-=pos;
0661 reinterpret_cast<group15*>(pc)->reset(pos);
0662 }
0663
0664 inline int match(std::size_t hash)const
0665 {
0666 return match_impl(reduced_hash(hash));
0667 }
0668
0669 inline bool is_not_overflowed(std::size_t hash)const
0670 {
0671 return !(reinterpret_cast<const boost::uint16_t*>(m)[hash%8] & 0x8000u);
0672 }
0673
0674 inline void mark_overflow(std::size_t hash)
0675 {
0676 reinterpret_cast<boost::uint16_t*>(m)[hash%8]|=0x8000u;
0677 }
0678
0679 static inline bool maybe_caused_overflow(unsigned char* pc)
0680 {
0681 std::size_t pos=reinterpret_cast<uintptr_t>(pc)%sizeof(group15);
0682 group15 *pg=reinterpret_cast<group15*>(pc-pos);
0683 boost::uint64_t x=((pg->m[0])>>pos)&0x000100010001ull;
0684 boost::uint32_t y=narrow_cast<boost::uint32_t>(x|(x>>15)|(x>>30));
0685 return !pg->is_not_overflowed(y);
0686 };
0687
0688 inline int match_available()const
0689 {
0690 boost::uint64_t x=~(m[0]|m[1]);
0691 boost::uint32_t y=static_cast<boost::uint32_t>(x&(x>>32));
0692 y&=y>>16;
0693 return y&0x7FFF;
0694 }
0695
0696 inline bool is_occupied(std::size_t pos)const
0697 {
0698 BOOST_ASSERT(pos<N);
0699 boost::uint64_t x=m[0]|m[1];
0700 return (x&(0x0001000100010001ull<<pos))!=0;
0701 }
0702
0703 inline int match_occupied()const
0704 {
0705 boost::uint64_t x=m[0]|m[1];
0706 boost::uint32_t y=narrow_cast<boost::uint32_t>(x|(x>>32));
0707 y|=y>>16;
0708 return y&0x7FFF;
0709 }
0710
0711 private:
0712 using word_type=IntegralWrapper<uint64_t>;
0713 BOOST_UNORDERED_STATIC_ASSERT(sizeof(word_type)==8);
0714
0715 static constexpr unsigned char available_=0,
0716 sentinel_=1;
0717
0718 inline static unsigned char reduced_hash(std::size_t hash)
0719 {
0720 static constexpr unsigned char table[]={
0721 8,9,2,3,4,5,6,7,8,9,10,11,12,13,14,15,
0722 16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,
0723 32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,
0724 48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,
0725 64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,
0726 80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,
0727 96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,
0728 112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,
0729 128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,
0730 144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,
0731 160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,
0732 176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,
0733 192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,
0734 208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,
0735 224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,
0736 240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,
0737 };
0738
0739 return table[narrow_cast<unsigned char>(hash)];
0740 }
0741
0742 inline void set_impl(std::size_t pos,std::size_t n)
0743 {
0744 BOOST_ASSERT(n<256);
0745 set_impl(m[0],pos,n&0xFu);
0746 set_impl(m[1],pos,n>>4);
0747 }
0748
0749 static inline void set_impl(word_type& x,std::size_t pos,std::size_t n)
0750 {
0751 static constexpr boost::uint64_t mask[]=
0752 {
0753 0x0000000000000000ull,0x0000000000000001ull,0x0000000000010000ull,
0754 0x0000000000010001ull,0x0000000100000000ull,0x0000000100000001ull,
0755 0x0000000100010000ull,0x0000000100010001ull,0x0001000000000000ull,
0756 0x0001000000000001ull,0x0001000000010000ull,0x0001000000010001ull,
0757 0x0001000100000000ull,0x0001000100000001ull,0x0001000100010000ull,
0758 0x0001000100010001ull,
0759 };
0760 static constexpr boost::uint64_t imask[]=
0761 {
0762 0x0001000100010001ull,0x0001000100010000ull,0x0001000100000001ull,
0763 0x0001000100000000ull,0x0001000000010001ull,0x0001000000010000ull,
0764 0x0001000000000001ull,0x0001000000000000ull,0x0000000100010001ull,
0765 0x0000000100010000ull,0x0000000100000001ull,0x0000000100000000ull,
0766 0x0000000000010001ull,0x0000000000010000ull,0x0000000000000001ull,
0767 0x0000000000000000ull,
0768 };
0769
0770 BOOST_ASSERT(pos<16&&n<16);
0771 x|= mask[n]<<pos;
0772 x&=~(imask[n]<<pos);
0773 }
0774
0775 inline int match_impl(std::size_t n)const
0776 {
0777 static constexpr boost::uint64_t mask[]=
0778 {
0779 0x0000000000000000ull,0x000000000000ffffull,0x00000000ffff0000ull,
0780 0x00000000ffffffffull,0x0000ffff00000000ull,0x0000ffff0000ffffull,
0781 0x0000ffffffff0000ull,0x0000ffffffffffffull,0xffff000000000000ull,
0782 0xffff00000000ffffull,0xffff0000ffff0000ull,0xffff0000ffffffffull,
0783 0xffffffff00000000ull,0xffffffff0000ffffull,0xffffffffffff0000ull,
0784 0xffffffffffffffffull,
0785 };
0786
0787 BOOST_ASSERT(n<256);
0788 boost::uint64_t x=m[0]^mask[n&0xFu];
0789 x=~((m[1]^mask[n>>4])|x);
0790 boost::uint32_t y=static_cast<boost::uint32_t>(x&(x>>32));
0791 y&=y>>16;
0792 return y&0x7FFF;
0793 }
0794
0795 alignas(16) word_type m[2];
0796 };
0797
0798 #endif
0799
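/* pow2_size_policy: the number of groups is always a power of two. Rather
 * than the exponent itself, size_index() stores its complement with respect
 * to the bit width of std::size_t, so that size() is 1<<(W-index) and
 * position() can map a hash to a group with a single right shift, i.e. using
 * the hash's highest bits.
 */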
0828 struct pow2_size_policy
0829 {
0830 static inline std::size_t size_index(std::size_t n)
0831 {
0835 return sizeof(std::size_t)*CHAR_BIT-
0836 (n<=2?1:((std::size_t)(boost::core::bit_width(n-1))));
0837 }
0838
0839 static inline std::size_t size(std::size_t size_index_)
0840 {
0841 return std::size_t(1)<<(sizeof(std::size_t)*CHAR_BIT-size_index_);
0842 }
0843
0844 static constexpr std::size_t min_size(){return 2;}
0845
0846 static inline std::size_t position(std::size_t hash,std::size_t size_index_)
0847 {
0848 return hash>>size_index_;
0849 }
0850 };
0851
0852
0853
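/* smallest group count such that (groups*N-1) slots can hold n elements */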
0854 template<typename Group,typename SizePolicy>
0855 static inline std::size_t size_index_for(std::size_t n)
0856 {
0857
0858 return SizePolicy::size_index(n/Group::N+1);
0859 }
0860
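/* pow2_quadratic_prober walks the group array starting at pos with quadratic
 * (triangular-number) steps: pos, pos+1, pos+3, pos+6, ... modulo the number
 * of groups. With a power-of-two group count this sequence visits every group
 * exactly once before next() returns false; length() reports how many groups
 * have been probed so far (used by the stats machinery).
 */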
0866 struct pow2_quadratic_prober
0867 {
0868 pow2_quadratic_prober(std::size_t pos_):pos{pos_}{}
0869
0870 inline std::size_t get()const{return pos;}
0871 inline std::size_t length()const{return step+1;}
0872
0873
0878 inline bool next(std::size_t mask)
0879 {
0880 step+=1;
0881 pos=(pos+step)&mask;
0882 return step<=mask;
0883 }
0884
0885 private:
0886 std::size_t pos,step=0;
0887 };
0888
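/* Post-mixing policies for the user-provided hash function: no_mix forwards
 * the hash value untouched, while mulx_mix runs it through mulx() to spread
 * entropy across all bits. table_core selects no_mix only when the hash is
 * tagged as avalanching (hash_is_avalanching), since the group position comes
 * from the high bits of the value and the reduced hash from its low byte.
 */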
0897 struct no_mix
0898 {
0899 template<typename Hash,typename T>
0900 static inline std::size_t mix(const Hash& h,const T& x)
0901 {
0902 return h(x);
0903 }
0904 };
0905
0906 struct mulx_mix
0907 {
0908 template<typename Hash,typename T>
0909 static inline std::size_t mix(const Hash& h,const T& x)
0910 {
0911 return mulx(h(x));
0912 }
0913 };
0914
0919 inline unsigned int unchecked_countr_zero(int x)
0920 {
0921 #if defined(BOOST_MSVC)
0922 unsigned long r;
0923 _BitScanForward(&r,(unsigned long)x);
0924 return (unsigned int)r;
0925 #else
0926 BOOST_UNORDERED_ASSUME(x!=0);
0927 return (unsigned int)boost::core::countr_zero((unsigned int)x);
0928 #endif
0929 }
0930
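/* dummy_groups() returns a statically allocated, all-"available" array of
 * Size groups whose first group carries a sentinel. It is used as the groups
 * array of zero-capacity tables (see table_arrays::set_arrays), so that empty
 * tables need no special-casing in the lookup and iteration code.
 */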
0940 template<typename Group,std::size_t Size>
0941 Group* dummy_groups()
0942 {
0951 static constexpr typename Group::dummy_group_type
0952 storage[Size]={typename Group::dummy_group_type(),};
0953
0954 return reinterpret_cast<Group*>(
0955 const_cast<typename Group::dummy_group_type*>(storage));
0956 }
0957
0958 template<
0959 typename Ptr,typename Ptr2,
0960 typename std::enable_if<!std::is_same<Ptr,Ptr2>::value>::type* = nullptr
0961 >
0962 Ptr to_pointer(Ptr2 p)
0963 {
0964 if(!p){return nullptr;}
0965 return boost::pointer_traits<Ptr>::pointer_to(*p);
0966 }
0967
0968 template<typename Ptr>
0969 Ptr to_pointer(Ptr p)
0970 {
0971 return p;
0972 }
0973
0974 template<typename Arrays,typename Allocator>
0975 struct arrays_holder
0976 {
0977 arrays_holder(const Arrays& arrays,const Allocator& al):
0978 arrays_{arrays},al_{al}
0979 {}
0980
0981
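/* declared but deliberately not defined: arrays_holder is only returned by
 * value (see make_arrays), and some compilers in pre-C++17 mode require a
 * visible copy constructor for that even though the copy is elided.
 */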
0982 arrays_holder(arrays_holder const&);
0983 arrays_holder& operator=(arrays_holder const&)=delete;
0984
0985 ~arrays_holder()
0986 {
0987 if(!released_){
0988 arrays_.delete_(typename Arrays::allocator_type(al_),arrays_);
0989 }
0990 }
0991
0992 const Arrays& release()
0993 {
0994 released_=true;
0995 return arrays_;
0996 }
0997
0998 private:
0999 Arrays arrays_;
1000 Allocator al_;
1001 bool released_=false;
1002 };
1003
1004 template<typename Value,typename Group,typename SizePolicy,typename Allocator>
1005 struct table_arrays
1006 {
1007 using allocator_type=typename boost::allocator_rebind<Allocator,Value>::type;
1008
1009 using value_type=Value;
1010 using group_type=Group;
1011 static constexpr auto N=group_type::N;
1012 using size_policy=SizePolicy;
1013 using value_type_pointer=
1014 typename boost::allocator_pointer<allocator_type>::type;
1015 using group_type_pointer=
1016 typename boost::pointer_traits<value_type_pointer>::template
1017 rebind<group_type>;
1018 using group_type_pointer_traits=boost::pointer_traits<group_type_pointer>;
1019
1020 table_arrays(
1021 std::size_t gsi,std::size_t gsm,
1022 group_type_pointer pg,value_type_pointer pe):
1023 groups_size_index{gsi},groups_size_mask{gsm},groups_{pg},elements_{pe}{}
1024
1025 value_type* elements()const noexcept{return boost::to_address(elements_);}
1026 group_type* groups()const noexcept{return boost::to_address(groups_);}
1027
1028 static void set_arrays(table_arrays& arrays,allocator_type al,std::size_t n)
1029 {
1030 return set_arrays(
1031 arrays,al,n,std::is_same<group_type*,group_type_pointer>{});
1032 }
1033
1034 static void set_arrays(
1035 table_arrays& arrays,allocator_type al,std::size_t,
1036 std::false_type )
1037 {
1038 using storage_traits=boost::allocator_traits<allocator_type>;
1039 auto groups_size_index=arrays.groups_size_index;
1040 auto groups_size=size_policy::size(groups_size_index);
1041
1042 auto sal=allocator_type(al);
1043 arrays.elements_=storage_traits::allocate(sal,buffer_size(groups_size));
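/* Elements and groups share a single allocation sized by buffer_size(): the
 * metadata groups are placed at the first sizeof(group_type)-aligned address
 * at or past the end of the element area (the sentinel slot of the last group
 * has no element storage behind it, hence the -1).
 */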
1049 auto p=reinterpret_cast<unsigned char*>(arrays.elements()+groups_size*N-1);
1050 p+=(uintptr_t(sizeof(group_type))-
1051 reinterpret_cast<uintptr_t>(p))%sizeof(group_type);
1052 arrays.groups_=
1053 group_type_pointer_traits::pointer_to(*reinterpret_cast<group_type*>(p));
1054
1055 initialize_groups(
1056 arrays.groups(),groups_size,
1057 is_trivially_default_constructible<group_type>{});
1058 arrays.groups()[groups_size-1].set_sentinel();
1059 }
1060
1061 static void set_arrays(
1062 table_arrays& arrays,allocator_type al,std::size_t n,
1063 std::true_type )
1064 {
1065 if(!n){
1066 arrays.groups_=dummy_groups<group_type,size_policy::min_size()>();
1067 }
1068 else{
1069 set_arrays(arrays,al,n,std::false_type{});
1070 }
1071 }
1072
1073 static table_arrays new_(allocator_type al,std::size_t n)
1074 {
1075 auto groups_size_index=size_index_for<group_type,size_policy>(n);
1076 auto groups_size=size_policy::size(groups_size_index);
1077 table_arrays arrays{groups_size_index,groups_size-1,nullptr,nullptr};
1078
1079 set_arrays(arrays,al,n);
1080 return arrays;
1081 }
1082
1083 static void delete_(allocator_type al,table_arrays& arrays)noexcept
1084 {
1085 using storage_traits=boost::allocator_traits<allocator_type>;
1086
1087 auto sal=allocator_type(al);
1088 if(arrays.elements()){
1089 storage_traits::deallocate(
1090 sal,arrays.elements_,buffer_size(arrays.groups_size_mask+1));
1091 }
1092 }
1093
1094
1095
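/* Size of the combined element+groups buffer, measured in value_type units:
 * groups_size*N-1 element slots (the last metadata slot of the last group is
 * the sentinel and needs no element storage), plus groups_size groups of
 * metadata and up to sizeof(group_type)-1 bytes of alignment padding, rounded
 * up to a whole number of value_type's.
 */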
1096 static std::size_t buffer_size(std::size_t groups_size)
1097 {
1098 auto buffer_bytes=
1099
1100 sizeof(value_type)*(groups_size*N-1)+
1101
1102 sizeof(group_type)*(groups_size+1)-1;
1103
1104
1105 return (buffer_bytes+sizeof(value_type)-1)/sizeof(value_type);
1106 }
1107
1108 static void initialize_groups(
1109 group_type* pg,std::size_t size,std::true_type )
1110 {
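/* group_type is trivially default-constructible here and its freshly
 * initialized state is all-zero bytes (every slot available, overflow byte
 * clear), so zeroing the raw storage with memset has the same effect as
 * constructing the groups one by one.
 */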
1117 std::memset(
1118 reinterpret_cast<unsigned char*>(pg),0,sizeof(group_type)*size);
1119 }
1120
1121 static void initialize_groups(
1122 group_type* pg,std::size_t size,std::false_type )
1123 {
1124 while(size--!=0)::new (pg++) group_type();
1125 }
1126
1127 std::size_t groups_size_index;
1128 std::size_t groups_size_mask;
1129 group_type_pointer groups_;
1130 value_type_pointer elements_;
1131 };
1132
1133 #if defined(BOOST_UNORDERED_ENABLE_STATS)
1134
1135
1136 struct table_core_cumulative_stats
1137 {
1138 concurrent_cumulative_stats<1> insertion;
1139 concurrent_cumulative_stats<2> successful_lookup,
1140 unsuccessful_lookup;
1141 };
1142
1143 struct table_core_insertion_stats
1144 {
1145 std::size_t count;
1146 sequence_stats_summary probe_length;
1147 };
1148
1149 struct table_core_lookup_stats
1150 {
1151 std::size_t count;
1152 sequence_stats_summary probe_length;
1153 sequence_stats_summary num_comparisons;
1154 };
1155
1156 struct table_core_stats
1157 {
1158 table_core_insertion_stats insertion;
1159 table_core_lookup_stats successful_lookup,
1160 unsuccessful_lookup;
1161 };
1162
1163 #define BOOST_UNORDERED_ADD_STATS(stats,args) stats.add args
1164 #define BOOST_UNORDERED_SWAP_STATS(stats1,stats2) std::swap(stats1,stats2)
1165 #define BOOST_UNORDERED_COPY_STATS(stats1,stats2) stats1=stats2
1166 #define BOOST_UNORDERED_RESET_STATS_OF(x) x.reset_stats()
1167 #define BOOST_UNORDERED_STATS_COUNTER(name) std::size_t name=0
1168 #define BOOST_UNORDERED_INCREMENT_STATS_COUNTER(name) ++name
1169
1170 #else
1171
1172 #define BOOST_UNORDERED_ADD_STATS(stats,args) ((void)0)
1173 #define BOOST_UNORDERED_SWAP_STATS(stats1,stats2) ((void)0)
1174 #define BOOST_UNORDERED_COPY_STATS(stats1,stats2) ((void)0)
1175 #define BOOST_UNORDERED_RESET_STATS_OF(x) ((void)0)
1176 #define BOOST_UNORDERED_STATS_COUNTER(name) ((void)0)
1177 #define BOOST_UNORDERED_INCREMENT_STATS_COUNTER(name) ((void)0)
1178
1179 #endif
1180
1181 struct if_constexpr_void_else{void operator()()const{}};
1182
1183 template<bool B,typename F,typename G=if_constexpr_void_else>
1184 void if_constexpr(F f,G g={})
1185 {
1186 std::get<B?0:1>(std::forward_as_tuple(f,g))();
1187 }
1188
1189 template<bool B,typename T,typename std::enable_if<B>::type* =nullptr>
1190 void copy_assign_if(T& x,const T& y){x=y;}
1191
1192 template<bool B,typename T,typename std::enable_if<!B>::type* =nullptr>
1193 void copy_assign_if(T&,const T&){}
1194
1195 template<bool B,typename T,typename std::enable_if<B>::type* =nullptr>
1196 void move_assign_if(T& x,T& y){x=std::move(y);}
1197
1198 template<bool B,typename T,typename std::enable_if<!B>::type* =nullptr>
1199 void move_assign_if(T&,T&){}
1200
1201 template<bool B,typename T,typename std::enable_if<B>::type* =nullptr>
1202 void swap_if(T& x,T& y){using std::swap; swap(x,y);}
1203
1204 template<bool B,typename T,typename std::enable_if<!B>::type* =nullptr>
1205 void swap_if(T&,T&){}
1206
1207 template<typename Allocator>
1208 struct is_std_allocator:std::false_type{};
1209
1210 template<typename T>
1211 struct is_std_allocator<std::allocator<T>>:std::true_type{};
1212
1213
1214 #if defined(_LIBCPP_SUPPRESS_DEPRECATED_PUSH)
1215 _LIBCPP_SUPPRESS_DEPRECATED_PUSH
1216 #elif defined(_STL_DISABLE_DEPRECATED_WARNING)
1217 _STL_DISABLE_DEPRECATED_WARNING
1218 #elif defined(_MSC_VER)
1219 #pragma warning(push)
1220 #pragma warning(disable:4996)
1221 #endif
1222
1223 template<typename Allocator,typename Ptr,typename... Args>
1224 struct alloc_has_construct
1225 {
1226 private:
1227 template<typename Allocator2>
1228 static decltype(
1229 std::declval<Allocator2&>().construct(
1230 std::declval<Ptr>(),std::declval<Args&&>()...),
1231 std::true_type{}
1232 ) check(int);
1233
1234 template<typename> static std::false_type check(...);
1235
1236 public:
1237 static constexpr bool value=decltype(check<Allocator>(0))::value;
1238 };
1239
1240 #if defined(_LIBCPP_SUPPRESS_DEPRECATED_POP)
1241 _LIBCPP_SUPPRESS_DEPRECATED_POP
1242 #elif defined(_STL_RESTORE_DEPRECATED_WARNING)
1243 _STL_RESTORE_DEPRECATED_WARNING
1244 #elif defined(_MSC_VER)
1245 #pragma warning(pop)
1246 #endif
1247
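/* Maximum load factor: size_ctrl.ml is set to mlf (87.5%) of the capacity,
 * except that very small tables (capacity<=2*N-1) are allowed to fill up
 * completely; see initial_max_load().
 */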
1252 static constexpr float mlf=0.875f;
1253
1254 template<typename Group,typename Element>
1255 struct table_locator
1256 {
1257 table_locator()=default;
1258 table_locator(Group* pg_,unsigned int n_,Element* p_):pg{pg_},n{n_},p{p_}{}
1259
1260 explicit operator bool()const noexcept{return p!=nullptr;}
1261
1262 Group *pg=nullptr;
1263 unsigned int n=0;
1264 Element *p=nullptr;
1265 };
1266
1267 struct try_emplace_args_t{};
1268
1269 template<typename TypePolicy,typename Allocator,typename... Args>
1270 class alloc_cted_insert_type
1271 {
1272 using emplace_type=typename std::conditional<
1273 std::is_constructible<typename TypePolicy::init_type,Args...>::value,
1274 typename TypePolicy::init_type,
1275 typename TypePolicy::value_type
1276 >::type;
1277
1278 using insert_type=typename std::conditional<
1279 std::is_constructible<typename TypePolicy::value_type,emplace_type>::value,
1280 emplace_type,typename TypePolicy::element_type
1281 >::type;
1282
1283 using alloc_cted = allocator_constructed<Allocator, insert_type, TypePolicy>;
1284 alloc_cted val;
1285
1286 public:
1287 alloc_cted_insert_type(const Allocator& al_,Args&&... args):val{al_,std::forward<Args>(args)...}
1288 {
1289 }
1290
1291 insert_type& value(){return val.value();}
1292 };
1293
1294 template<typename TypePolicy,typename Allocator,typename... Args>
1295 alloc_cted_insert_type<TypePolicy,Allocator,Args...>
1296 alloc_make_insert_type(const Allocator& al,Args&&... args)
1297 {
1298 return {al,std::forward<Args>(args)...};
1299 }
1300
1301 template <typename TypePolicy, typename Allocator, typename KFwdRef,
1302 typename = void>
1303 class alloc_cted_or_fwded_key_type
1304 {
1305 using key_type = typename TypePolicy::key_type;
1306 allocator_constructed<Allocator, key_type, TypePolicy> val;
1307
1308 public:
1309 alloc_cted_or_fwded_key_type(const Allocator& al_, KFwdRef k)
1310 : val(al_, std::forward<KFwdRef>(k))
1311 {
1312 }
1313
1314 key_type&& move_or_fwd() { return std::move(val.value()); }
1315 };
1316
1317 template <typename TypePolicy, typename Allocator, typename KFwdRef>
1318 class alloc_cted_or_fwded_key_type<TypePolicy, Allocator, KFwdRef,
1319 typename std::enable_if<
1320 is_similar<KFwdRef, typename TypePolicy::key_type>::value>::type>
1321 {
1322
1323 BOOST_UNORDERED_STATIC_ASSERT(std::is_reference<KFwdRef>::value);
1324 KFwdRef ref;
1325
1326 public:
1327 alloc_cted_or_fwded_key_type(const Allocator&, KFwdRef k)
1328 : ref(std::forward<KFwdRef>(k))
1329 {
1330 }
1331
1332 KFwdRef move_or_fwd() { return std::forward<KFwdRef>(ref); }
1333 };
1334
1335 template <typename Container>
1336 using is_map =
1337 std::integral_constant<bool, !std::is_same<typename Container::key_type,
1338 typename Container::value_type>::value>;
1339
1340 template <typename Container, typename K>
1341 using is_emplace_kv_able = std::integral_constant<bool,
1342 is_map<Container>::value &&
1343 (is_similar<K, typename Container::key_type>::value ||
1344 is_complete_and_move_constructible<typename Container::key_type>::value)>;
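/* table_core is the common implementation engine behind the open-addressing
 * unordered containers. Elements are kept in groups of N=15 slots with a
 * per-group metadata block (group_type); groups are probed with
 * pow2_quadratic_prober over a power-of-two group array (pow2_size_policy),
 * and non-avalanching hash functions are post-mixed with mulx_mix. Hash, Pred
 * and Allocator are stored through empty_value to benefit from the empty-base
 * optimization, and size_ctrl tracks the current size together with the
 * maximum load before the next rehash. TypePolicy abstracts how elements are
 * represented and constructed (e.g. map vs. set, flat vs. node-based layouts).
 */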
1389 #include <boost/unordered/detail/foa/ignore_wshadow.hpp>
1390
1391 #if defined(BOOST_MSVC)
1392 #pragma warning(push)
1393 #pragma warning(disable:4714)
1394 #endif
1395
1396 #if BOOST_WORKAROUND(BOOST_MSVC,<=1900)
1397
1398
1399
1400 #pragma warning(push)
1401 #pragma warning(disable:4702)
1402 #endif
1403
1404 template<
1405 typename TypePolicy,typename Group,template<typename...> class Arrays,
1406 typename SizeControl,typename Hash,typename Pred,typename Allocator
1407 >
1408 class
1409
1410 #if defined(_MSC_VER)&&_MSC_FULL_VER>=190023918
1411 __declspec(empty_bases)
1412 #endif
1413
1414 table_core:empty_value<Hash,0>,empty_value<Pred,1>,empty_value<Allocator,2>
1415 {
1416 public:
1417 using type_policy=TypePolicy;
1418 using group_type=Group;
1419 static constexpr auto N=group_type::N;
1420 using size_policy=pow2_size_policy;
1421 using prober=pow2_quadratic_prober;
1422 using mix_policy=typename std::conditional<
1423 hash_is_avalanching<Hash>::value,
1424 no_mix,
1425 mulx_mix
1426 >::type;
1427 using alloc_traits=boost::allocator_traits<Allocator>;
1428 using element_type=typename type_policy::element_type;
1429 using arrays_type=Arrays<element_type,group_type,size_policy,Allocator>;
1430 using size_ctrl_type=SizeControl;
1431 static constexpr auto uses_fancy_pointers=!std::is_same<
1432 typename alloc_traits::pointer,
1433 typename alloc_traits::value_type*
1434 >::value;
1435
1436 using key_type=typename type_policy::key_type;
1437 using init_type=typename type_policy::init_type;
1438 using value_type=typename type_policy::value_type;
1439 using hasher=Hash;
1440 using key_equal=Pred;
1441 using allocator_type=Allocator;
1442 using pointer=value_type*;
1443 using const_pointer=const value_type*;
1444 using reference=value_type&;
1445 using const_reference=const value_type&;
1446 using size_type=std::size_t;
1447 using difference_type=std::ptrdiff_t;
1448 using locator=table_locator<group_type,element_type>;
1449 using arrays_holder_type=arrays_holder<arrays_type,Allocator>;
1450
1451 #if defined(BOOST_UNORDERED_ENABLE_STATS)
1452 using cumulative_stats=table_core_cumulative_stats;
1453 using stats=table_core_stats;
1454 #endif
1455
1456 table_core(
1457 std::size_t n=default_bucket_count,const Hash& h_=Hash(),
1458 const Pred& pred_=Pred(),const Allocator& al_=Allocator()):
1459 hash_base{empty_init,h_},pred_base{empty_init,pred_},
1460 allocator_base{empty_init,al_},arrays(new_arrays(n)),
1461 size_ctrl{initial_max_load(),0}
1462 {}
1463
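/* Variant of the constructor above where the arrays are supplied by a
 * callable rather than allocated here; used by the move constructor below,
 * which hands over the source table's arrays.
 */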
1468 template<typename ArraysFn>
1469 table_core(
1470 Hash&& h_,Pred&& pred_,Allocator&& al_,
1471 ArraysFn arrays_fn,const size_ctrl_type& size_ctrl_):
1472 hash_base{empty_init,std::move(h_)},
1473 pred_base{empty_init,std::move(pred_)},
1474 allocator_base{empty_init,std::move(al_)},
1475 arrays(arrays_fn()),size_ctrl(size_ctrl_)
1476 {}
1477
1478 table_core(const table_core& x):
1479 table_core{x,alloc_traits::select_on_container_copy_construction(x.al())}{}
1480
1481 template<typename ArraysFn>
1482 table_core(table_core&& x,arrays_holder_type&& ah,ArraysFn arrays_fn):
1483 table_core(
1484 std::move(x.h()),std::move(x.pred()),std::move(x.al()),
1485 arrays_fn,x.size_ctrl)
1486 {
1487 x.arrays=ah.release();
1488 x.size_ctrl.ml=x.initial_max_load();
1489 x.size_ctrl.size=0;
1490 BOOST_UNORDERED_SWAP_STATS(cstats,x.cstats);
1491 }
1492
1493 table_core(table_core&& x)
1494 noexcept(
1495 std::is_nothrow_move_constructible<Hash>::value&&
1496 std::is_nothrow_move_constructible<Pred>::value&&
1497 std::is_nothrow_move_constructible<Allocator>::value&&
1498 !uses_fancy_pointers):
1499 table_core{
1500 std::move(x),x.make_empty_arrays(),[&x]{return x.arrays;}}
1501 {}
1502
1503 table_core(const table_core& x,const Allocator& al_):
1504 table_core{std::size_t(std::ceil(float(x.size())/mlf)),x.h(),x.pred(),al_}
1505 {
1506 copy_elements_from(x);
1507 }
1508
1509 table_core(table_core&& x,const Allocator& al_):
1510 table_core{std::move(x.h()),std::move(x.pred()),al_}
1511 {
1512 if(al()==x.al()){
1513 using std::swap;
1514 swap(arrays,x.arrays);
1515 swap(size_ctrl,x.size_ctrl);
1516 BOOST_UNORDERED_SWAP_STATS(cstats,x.cstats);
1517 }
1518 else{
1519 reserve(x.size());
1520 clear_on_exit c{x};
1521 (void)c;
1522 BOOST_UNORDERED_RESET_STATS_OF(x);
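/* unequal allocators: x's elements can't be adopted wholesale, so they are
 * moved one by one into storage obtained from our own allocator.
 */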
1527 x.for_all_elements([this](element_type* p){
1528 unchecked_insert(type_policy::move(type_policy::value_from(*p)));
1529 });
1530 }
1531 }
1532
1533 ~table_core()noexcept
1534 {
1535 for_all_elements([this](element_type* p){
1536 destroy_element(p);
1537 });
1538 delete_arrays(arrays);
1539 }
1540
1541 std::size_t initial_max_load()const
1542 {
1543 static constexpr std::size_t small_capacity=2*N-1;
1544
1545 auto capacity_=capacity();
1546 if(capacity_<=small_capacity){
1547 return capacity_;
1548 }
1549 else{
1550 return (std::size_t)(mlf*(float)(capacity_));
1551 }
1552 }
1553
1554 arrays_holder_type make_empty_arrays()const
1555 {
1556 return make_arrays(0);
1557 }
1558
1559 table_core& operator=(const table_core& x)
1560 {
1561 BOOST_UNORDERED_STATIC_ASSERT_HASH_PRED(Hash, Pred)
1562
1563 static constexpr auto pocca=
1564 alloc_traits::propagate_on_container_copy_assignment::value;
1565
1566 if(this!=std::addressof(x)){
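/* Copy Hash and Pred into temporaries first: if either copy throws, *this is
 * left untouched. They are installed below via nothrow swap (enforced by
 * BOOST_UNORDERED_STATIC_ASSERT_HASH_PRED) once the table has been cleared.
 */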
1570 hasher tmp_h=x.h();
1571 key_equal tmp_p=x.pred();
1572
1573 clear();
1574
1579 using std::swap;
1580 swap(h(),tmp_h);
1581 swap(pred(),tmp_p);
1582
1583 if_constexpr<pocca>([&,this]{
1584 if(al()!=x.al()){
1585 auto ah=x.make_arrays(std::size_t(std::ceil(float(x.size())/mlf)));
1586 delete_arrays(arrays);
1587 arrays=ah.release();
1588 size_ctrl.ml=initial_max_load();
1589 }
1590 copy_assign_if<pocca>(al(),x.al());
1591 });
1592
1593 noshrink_reserve(x.size());
1594 copy_elements_from(x);
1595 }
1596 return *this;
1597 }
1598
1599 #if defined(BOOST_MSVC)
1600 #pragma warning(push)
1601 #pragma warning(disable:4127)
1602 #endif
1603
1604 table_core& operator=(table_core&& x)
1605 noexcept(
1606 (alloc_traits::propagate_on_container_move_assignment::value||
1607 alloc_traits::is_always_equal::value)&&!uses_fancy_pointers)
1608 {
1609 BOOST_UNORDERED_STATIC_ASSERT_HASH_PRED(Hash, Pred)
1610
1611 static constexpr auto pocma=
1612 alloc_traits::propagate_on_container_move_assignment::value;
1613
1614 if(this!=std::addressof(x)){
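/* If the allocator propagates on move assignment or the allocators compare
 * equal, we can simply adopt x's arrays and hand x a fresh empty array set;
 * otherwise the elements have to be moved one by one using our own allocator.
 */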
1625 using std::swap;
1626
1627 clear();
1628
1629 if(pocma||al()==x.al()){
1630 auto ah=x.make_empty_arrays();
1631 swap(h(),x.h());
1632 swap(pred(),x.pred());
1633 delete_arrays(arrays);
1634 move_assign_if<pocma>(al(),x.al());
1635 arrays=x.arrays;
1636 size_ctrl.ml=std::size_t(x.size_ctrl.ml);
1637 size_ctrl.size=std::size_t(x.size_ctrl.size);
1638 BOOST_UNORDERED_COPY_STATS(cstats,x.cstats);
1639 x.arrays=ah.release();
1640 x.size_ctrl.ml=x.initial_max_load();
1641 x.size_ctrl.size=0;
1642 BOOST_UNORDERED_RESET_STATS_OF(x);
1643 }
1644 else{
1645 swap(h(),x.h());
1646 swap(pred(),x.pred());
1647
1648
1649 noshrink_reserve(x.size());
1650 clear_on_exit c{x};
1651 (void)c;
1652 BOOST_UNORDERED_RESET_STATS_OF(x);
1653
1654
1655
1656
1657 x.for_all_elements([this](element_type* p){
1658 unchecked_insert(type_policy::move(type_policy::value_from(*p)));
1659 });
1660 }
1661 }
1662 return *this;
1663 }
1664
1665 #if defined(BOOST_MSVC)
1666 #pragma warning(pop)
1667 #endif
1668
1669 allocator_type get_allocator()const noexcept{return al();}
1670
1671 bool empty()const noexcept{return size()==0;}
1672 std::size_t size()const noexcept{return size_ctrl.size;}
1673 std::size_t max_size()const noexcept{return SIZE_MAX;}
1674
1675 BOOST_FORCEINLINE
1676 void erase(group_type* pg,unsigned int pos,element_type* p)noexcept
1677 {
1678 destroy_element(p);
1679 recover_slot(pg,pos);
1680 }
1681
1682 BOOST_FORCEINLINE
1683 void erase(unsigned char* pc,element_type* p)noexcept
1684 {
1685 destroy_element(p);
1686 recover_slot(pc);
1687 }
1688
1689 template<typename Key>
1690 BOOST_FORCEINLINE locator find(const Key& x)const
1691 {
1692 auto hash=hash_for(x);
1693 return find(x,position_for(hash),hash);
1694 }
1695
1696 #if defined(BOOST_MSVC)
1697
1698 #pragma warning(push)
1699 #pragma warning(disable:4800)
1700 #endif
1701
1702 template<typename Key>
1703 BOOST_FORCEINLINE locator find(
1704 const Key& x,std::size_t pos0,std::size_t hash)const
1705 {
1706 BOOST_UNORDERED_STATS_COUNTER(num_cmps);
1707 prober pb(pos0);
1708 do{
1709 auto pos=pb.get();
1710 auto pg=arrays.groups()+pos;
1711 auto mask=pg->match(hash);
1712 if(mask){
1713 auto elements=arrays.elements();
1714 BOOST_UNORDERED_ASSUME(elements!=nullptr);
1715 auto p=elements+pos*N;
1716 BOOST_UNORDERED_PREFETCH_ELEMENTS(p,N);
1717 do{
1718 BOOST_UNORDERED_INCREMENT_STATS_COUNTER(num_cmps);
1719 auto n=unchecked_countr_zero(mask);
1720 if(BOOST_LIKELY(bool(pred()(x,key_from(p[n]))))){
1721 BOOST_UNORDERED_ADD_STATS(
1722 cstats.successful_lookup,(pb.length(),num_cmps));
1723 return {pg,n,p+n};
1724 }
1725 mask&=mask-1;
1726 }while(mask);
1727 }
1728 if(BOOST_LIKELY(pg->is_not_overflowed(hash))){
1729 BOOST_UNORDERED_ADD_STATS(
1730 cstats.unsuccessful_lookup,(pb.length(),num_cmps));
1731 return {};
1732 }
1733 }
1734 while(BOOST_LIKELY(pb.next(arrays.groups_size_mask)));
1735 BOOST_UNORDERED_ADD_STATS(
1736 cstats.unsuccessful_lookup,(pb.length(),num_cmps));
1737 return {};
1738 }
1739
1740 #if defined(BOOST_MSVC)
1741 #pragma warning(pop)
1742 #endif
1743
1744 void swap(table_core& x)
1745 noexcept(
1746 alloc_traits::propagate_on_container_swap::value||
1747 alloc_traits::is_always_equal::value)
1748 {
1749 BOOST_UNORDERED_STATIC_ASSERT_HASH_PRED(Hash, Pred)
1750
1751 static constexpr auto pocs=
1752 alloc_traits::propagate_on_container_swap::value;
1753
1754 using std::swap;
1755 if_constexpr<pocs>([&,this]{
1756 swap_if<pocs>(al(),x.al());
1757 },
1758 [&,this]{
1759 BOOST_ASSERT(al()==x.al());
1760 (void)this;
1761 });
1762
1763 swap(h(),x.h());
1764 swap(pred(),x.pred());
1765 swap(arrays,x.arrays);
1766 swap(size_ctrl,x.size_ctrl);
1767 }
1768
1769 void clear()noexcept
1770 {
1771 auto p=arrays.elements();
1772 if(p){
1773 for(auto pg=arrays.groups(),last=pg+arrays.groups_size_mask+1;
1774 pg!=last;++pg,p+=N){
1775 auto mask=match_really_occupied(pg,last);
1776 while(mask){
1777 destroy_element(p+unchecked_countr_zero(mask));
1778 mask&=mask-1;
1779 }
1780
1781 pg->initialize();
1782 }
1783 arrays.groups()[arrays.groups_size_mask].set_sentinel();
1784 size_ctrl.ml=initial_max_load();
1785 size_ctrl.size=0;
1786 }
1787 }
1788
1789 hasher hash_function()const{return h();}
1790 key_equal key_eq()const{return pred();}
1791
1792 std::size_t capacity()const noexcept
1793 {
1794 return arrays.elements()?(arrays.groups_size_mask+1)*N-1:0;
1795 }
1796
1797 float load_factor()const noexcept
1798 {
1799 if(capacity()==0)return 0;
1800 else return float(size())/float(capacity());
1801 }
1802
1803 float max_load_factor()const noexcept{return mlf;}
1804
1805 std::size_t max_load()const noexcept{return size_ctrl.ml;}
1806
1807 void rehash(std::size_t n)
1808 {
1809 auto m=size_t(std::ceil(float(size())/mlf));
1810 if(m>n)n=m;
1811 if(n)n=capacity_for(n);
1812
1813 if(n!=capacity())unchecked_rehash(n);
1814 }
1815
1816 void reserve(std::size_t n)
1817 {
1818 rehash(std::size_t(std::ceil(float(n)/mlf)));
1819 }
1820
1821 #if defined(BOOST_UNORDERED_ENABLE_STATS)
1822 stats get_stats()const
1823 {
1824 auto insertion=cstats.insertion.get_summary();
1825 auto successful_lookup=cstats.successful_lookup.get_summary();
1826 auto unsuccessful_lookup=cstats.unsuccessful_lookup.get_summary();
1827 return{
1828 {
1829 insertion.count,
1830 insertion.sequence_summary[0]
1831 },
1832 {
1833 successful_lookup.count,
1834 successful_lookup.sequence_summary[0],
1835 successful_lookup.sequence_summary[1]
1836 },
1837 {
1838 unsuccessful_lookup.count,
1839 unsuccessful_lookup.sequence_summary[0],
1840 unsuccessful_lookup.sequence_summary[1]
1841 },
1842 };
1843 }
1844
1845 void reset_stats()noexcept
1846 {
1847 cstats.insertion.reset();
1848 cstats.successful_lookup.reset();
1849 cstats.unsuccessful_lookup.reset();
1850 }
1851 #endif
1852
1853 friend bool operator==(const table_core& x,const table_core& y)
1854 {
1855 return
1856 x.size()==y.size()&&
1857 x.for_all_elements_while([&](element_type* p){
1858 auto loc=y.find(key_from(*p));
1859 return loc&&
1860 const_cast<const value_type&>(type_policy::value_from(*p))==
1861 const_cast<const value_type&>(type_policy::value_from(*loc.p));
1862 });
1863 }
1864
1865 friend bool operator!=(const table_core& x,const table_core& y)
1866 {
1867 return !(x==y);
1868 }
1869
1870 struct clear_on_exit
1871 {
1872 ~clear_on_exit(){x.clear();}
1873 table_core& x;
1874 };
1875
1876 Hash& h(){return hash_base::get();}
1877 const Hash& h()const{return hash_base::get();}
1878 Pred& pred(){return pred_base::get();}
1879 const Pred& pred()const{return pred_base::get();}
1880 Allocator& al(){return allocator_base::get();}
1881 const Allocator& al()const{return allocator_base::get();}
1882
1883 template<typename... Args>
1884 void construct_element(element_type* p,Args&&... args)
1885 {
1886 type_policy::construct(al(),p,std::forward<Args>(args)...);
1887 }
1888
1889 template<typename... Args>
1890 void construct_element(element_type* p,try_emplace_args_t,Args&&... args)
1891 {
1892 construct_element_from_try_emplace_args(
1893 p,
1894 std::integral_constant<bool,std::is_same<key_type,value_type>::value>{},
1895 std::forward<Args>(args)...);
1896 }
1897
1898 void destroy_element(element_type* p)noexcept
1899 {
1900 type_policy::destroy(al(),p);
1901 }
1902
1903 struct destroy_element_on_exit
1904 {
1905 ~destroy_element_on_exit(){this_->destroy_element(p);}
1906 table_core *this_;
1907 element_type *p;
1908 };
1909
1910 template<typename T>
1911 static inline auto key_from(const T& x)
1912 ->decltype(type_policy::extract(x))
1913 {
1914 return type_policy::extract(x);
1915 }
1916
1917 template<typename Key,typename... Args>
1918 static inline const Key& key_from(
1919 try_emplace_args_t,const Key& x,const Args&...)
1920 {
1921 return x;
1922 }
1923
1924 template<typename Key>
1925 inline std::size_t hash_for(const Key& x)const
1926 {
1927 return mix_policy::mix(h(),x);
1928 }
1929
1930 inline std::size_t position_for(std::size_t hash)const
1931 {
1932 return position_for(hash,arrays);
1933 }
1934
1935 static inline std::size_t position_for(
1936 std::size_t hash,const arrays_type& arrays_)
1937 {
1938 return size_policy::position(hash,arrays_.groups_size_index);
1939 }
1940
1941 static inline int match_really_occupied(group_type* pg,group_type* last)
1942 {
1943
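/* exclude the last group's sentinel slot from the occupied mask */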
1944 return pg->match_occupied()&~(int(pg==last-1)<<(N-1));
1945 }
1946
1947 template<typename... Args>
1948 locator unchecked_emplace_at(
1949 std::size_t pos0,std::size_t hash,Args&&... args)
1950 {
1951 auto res=nosize_unchecked_emplace_at(
1952 arrays,pos0,hash,std::forward<Args>(args)...);
1953 ++size_ctrl.size;
1954 return res;
1955 }
1956
1957 BOOST_NOINLINE void unchecked_rehash_for_growth()
1958 {
1959 auto new_arrays_=new_arrays_for_growth();
1960 unchecked_rehash(new_arrays_);
1961 }
1962
1963 template<typename... Args>
1964 BOOST_NOINLINE locator
1965 unchecked_emplace_with_rehash(std::size_t hash,Args&&... args)
1966 {
1967 auto new_arrays_=new_arrays_for_growth();
1968 locator it;
1969 BOOST_TRY{
1970
1971 it=nosize_unchecked_emplace_at(
1972 new_arrays_,position_for(hash,new_arrays_),
1973 hash,std::forward<Args>(args)...);
1974 }
1975 BOOST_CATCH(...){
1976 delete_arrays(new_arrays_);
1977 BOOST_RETHROW
1978 }
1979 BOOST_CATCH_END
1980
1981
1982 unchecked_rehash(new_arrays_);
1983 ++size_ctrl.size;
1984 return it;
1985 }
1986
1987 void noshrink_reserve(std::size_t n)
1988 {
1989
1990 BOOST_ASSERT(empty());
1991
1992 if(n){
1993 n=std::size_t(std::ceil(float(n)/mlf));
1994 n=capacity_for(n);
1995
1996 if(n>capacity()){
1997 auto new_arrays_=new_arrays(n);
1998 delete_arrays(arrays);
1999 arrays=new_arrays_;
2000 size_ctrl.ml=initial_max_load();
2001 }
2002 }
2003 }
2004
2005 template<typename F>
2006 void for_all_elements(F f)const
2007 {
2008 for_all_elements(arrays,f);
2009 }
2010
2011 template<typename F>
2012 static auto for_all_elements(const arrays_type& arrays_,F f)
2013 ->decltype(f(nullptr),void())
2014 {
2015 for_all_elements_while(arrays_,[&](element_type* p){f(p);return true;});
2016 }
2017
2018 template<typename F>
2019 static auto for_all_elements(const arrays_type& arrays_,F f)
2020 ->decltype(f(nullptr,0,nullptr),void())
2021 {
2022 for_all_elements_while(
2023 arrays_,[&](group_type* pg,unsigned int n,element_type* p)
2024 {f(pg,n,p);return true;});
2025 }
2026
2027 template<typename F>
2028 bool for_all_elements_while(F f)const
2029 {
2030 return for_all_elements_while(arrays,f);
2031 }
2032
2033 template<typename F>
2034 static auto for_all_elements_while(const arrays_type& arrays_,F f)
2035 ->decltype(f(nullptr),bool())
2036 {
2037 return for_all_elements_while(
2038 arrays_,[&](group_type*,unsigned int,element_type* p){return f(p);});
2039 }
2040
2041 template<typename F>
2042 static auto for_all_elements_while(const arrays_type& arrays_,F f)
2043 ->decltype(f(nullptr,0,nullptr),bool())
2044 {
2045 auto p=arrays_.elements();
2046 if(p){
2047 for(auto pg=arrays_.groups(),last=pg+arrays_.groups_size_mask+1;
2048 pg!=last;++pg,p+=N){
2049 auto mask=match_really_occupied(pg,last);
2050 while(mask){
2051 auto n=unchecked_countr_zero(mask);
2052 if(!f(pg,n,p+n))return false;
2053 mask&=mask-1;
2054 }
2055 }
2056 }
2057 return true;
2058 }
2059
2060 arrays_type arrays;
2061 size_ctrl_type size_ctrl;
2062
2063 #if defined(BOOST_UNORDERED_ENABLE_STATS)
2064 mutable cumulative_stats cstats;
2065 #endif
2066
2067 private:
2068 template<
2069 typename,typename,template<typename...> class,
2070 typename,typename,typename,typename
2071 >
2072 friend class table_core;
2073
2074 using hash_base=empty_value<Hash,0>;
2075 using pred_base=empty_value<Pred,1>;
2076 using allocator_base=empty_value<Allocator,2>;
2077
2078
2079
2080 table_core(Hash&& h_,Pred&& pred_,const Allocator& al_):
2081 hash_base{empty_init,std::move(h_)},
2082 pred_base{empty_init,std::move(pred_)},
2083 allocator_base{empty_init,al_},arrays(new_arrays(0)),
2084 size_ctrl{initial_max_load(),0}
2085 {
2086 }
2087
2088 arrays_type new_arrays(std::size_t n)const
2089 {
2090 return arrays_type::new_(typename arrays_type::allocator_type(al()),n);
2091 }
2092
2093 arrays_type new_arrays_for_growth()const
2094 {
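/* The target capacity is computed from the current number of elements rather
 * than from the current capacity, with a small slack (size()/61+1) so that
 * the new maximum load strictly exceeds size(). Basing growth on size() also
 * means a table whose max load has drifted down through recover_slot()
 * rehashes to a capacity that matches its actual population.
 */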
2105 return new_arrays(std::size_t(
2106 std::ceil(static_cast<float>(size()+size()/61+1)/mlf)));
2107 }
2108
2109 void delete_arrays(arrays_type& arrays_)noexcept
2110 {
2111 arrays_type::delete_(typename arrays_type::allocator_type(al()),arrays_);
2112 }
2113
2114 arrays_holder_type make_arrays(std::size_t n)const
2115 {
2116 return {new_arrays(n),al()};
2117 }
2118
2119 template<typename Key,typename... Args>
2120 void construct_element_from_try_emplace_args(
2121 element_type* p,std::false_type,Key&& x,Args&&... args)
2122 {
2123 type_policy::construct(
2124 this->al(),p,
2125 std::piecewise_construct,
2126 std::forward_as_tuple(std::forward<Key>(x)),
2127 std::forward_as_tuple(std::forward<Args>(args)...));
2128 }
2129
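/* When key_type and value_type are the same (set-like containers),
 * try_emplace-style construction degenerates to constructing the element
 * straight from the key.
 */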
2134 template<typename Key>
2135 void construct_element_from_try_emplace_args(
2136 element_type* p,std::true_type,Key&& x)
2137 {
2138 type_policy::construct(this->al(),p,std::forward<Key>(x));
2139 }
2140
2141 void copy_elements_from(const table_core& x)
2142 {
2143 BOOST_ASSERT(empty());
2144 BOOST_ASSERT(this!=std::addressof(x));
2145 if(arrays.groups_size_mask==x.arrays.groups_size_mask){
2146 fast_copy_elements_from(x);
2147 }
2148 else{
2149 x.for_all_elements([this](const element_type* p){
2150 unchecked_insert(*p);
2151 });
2152 }
2153 }
2154
2155 void fast_copy_elements_from(const table_core& x)
2156 {
2157 if(arrays.elements()&&x.arrays.elements()){
2158 copy_elements_array_from(x);
2159 copy_groups_array_from(x);
2160 size_ctrl.ml=std::size_t(x.size_ctrl.ml);
2161 size_ctrl.size=std::size_t(x.size_ctrl.size);
2162 }
2163 }
2164
2165 void copy_elements_array_from(const table_core& x)
2166 {
2167 copy_elements_array_from(
2168 x,
2169 std::integral_constant<
2170 bool,
2171 is_trivially_copy_constructible<element_type>::value&&(
2172 is_std_allocator<Allocator>::value||
2173 !alloc_has_construct<Allocator,value_type*,const value_type&>::value)
2174 >{}
2175 );
2176 }
2177
2178 void copy_elements_array_from(
2179 const table_core& x,std::true_type )
2180 {
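/* the dispatch above guarantees element_type is trivially copy constructible
 * and that the allocator doesn't customize construct(), so a raw memcpy of
 * the whole element array is fine (unoccupied slots are copied too,
 * harmlessly).
 */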
2184 std::memcpy(
2185 reinterpret_cast<unsigned char*>(arrays.elements()),
2186 reinterpret_cast<unsigned char*>(x.arrays.elements()),
2187 x.capacity()*sizeof(value_type));
2188 }
2189
2190 void copy_elements_array_from(
2191 const table_core& x,std::false_type )
2192 {
2193 std::size_t num_constructed=0;
2194 BOOST_TRY{
2195 x.for_all_elements([&,this](const element_type* p){
2196 construct_element(arrays.elements()+(p-x.arrays.elements()),*p);
2197 ++num_constructed;
2198 });
2199 }
2200 BOOST_CATCH(...){
2201 if(num_constructed){
2202 x.for_all_elements_while([&,this](const element_type* p){
2203 destroy_element(arrays.elements()+(p-x.arrays.elements()));
2204 return --num_constructed!=0;
2205 });
2206 }
2207 BOOST_RETHROW
2208 }
2209 BOOST_CATCH_END
2210 }
2211
2212 void copy_groups_array_from(const table_core& x) {
2213 copy_groups_array_from(x,is_trivially_copy_assignable<group_type>{});
2214 }
2215
2216 void copy_groups_array_from(
2217 const table_core& x, std::true_type )
2218 {
2219 std::memcpy(
2220 arrays.groups(),x.arrays.groups(),
2221 (arrays.groups_size_mask+1)*sizeof(group_type));
2222 }
2223
2224 void copy_groups_array_from(
2225 const table_core& x, std::false_type )
2226 {
2227 auto pg=arrays.groups();
2228 auto xpg=x.arrays.groups();
2229 for(std::size_t i=0;i<arrays.groups_size_mask+1;++i){
2230 pg[i]=xpg[i];
2231 }
2232 }
2233
2234 void recover_slot(unsigned char* pc)
2235 {
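/* Erasure can't clear overflow bits (other elements may depend on them), so
 * if this slot's reduced hash may have set its group's overflow byte we shave
 * one off the maximum load instead: the table then rehashes a little earlier,
 * which is when stale overflow information finally gets wiped.
 */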
2240 size_ctrl.ml-=group_type::maybe_caused_overflow(pc);
2241 group_type::reset(pc);
2242 --size_ctrl.size;
2243 }
2244
2245 void recover_slot(group_type* pg,std::size_t pos)
2246 {
2247 recover_slot(reinterpret_cast<unsigned char*>(pg)+pos);
2248 }
2249
2250 static std::size_t capacity_for(std::size_t n)
2251 {
2252 return size_policy::size(size_index_for<group_type,size_policy>(n))*N-1;
2253 }
2254
2255 BOOST_NOINLINE void unchecked_rehash(std::size_t n)
2256 {
2257 auto new_arrays_=new_arrays(n);
2258 unchecked_rehash(new_arrays_);
2259 }
2260
2261 BOOST_NOINLINE void unchecked_rehash(arrays_type& new_arrays_)
2262 {
2263 std::size_t num_destroyed=0;
2264 BOOST_TRY{
2265 for_all_elements([&,this](element_type* p){
2266 nosize_transfer_element(p,new_arrays_,num_destroyed);
2267 });
2268 }
2269 BOOST_CATCH(...){
2270 if(num_destroyed){
2271 for_all_elements_while(
2272 [&,this](group_type* pg,unsigned int n,element_type*){
2273 recover_slot(pg,n);
2274 return --num_destroyed!=0;
2275 }
2276 );
2277 }
2278 for_all_elements(new_arrays_,[this](element_type* p){
2279 destroy_element(p);
2280 });
2281 delete_arrays(new_arrays_);
2282 BOOST_RETHROW
2283 }
2284 BOOST_CATCH_END
2285
2286
2287 BOOST_ASSERT(num_destroyed==size()||num_destroyed==0);
2288 if(num_destroyed!=size()){
2289 for_all_elements([this](element_type* p){
2290 destroy_element(p);
2291 });
2292 }
2293 delete_arrays(arrays);
2294 arrays=new_arrays_;
2295 size_ctrl.ml=initial_max_load();
2296 }
2297
2298 template<typename Value>
2299 void unchecked_insert(Value&& x)
2300 {
2301 auto hash=hash_for(key_from(x));
2302 unchecked_emplace_at(position_for(hash),hash,std::forward<Value>(x));
2303 }
2304
2305 void nosize_transfer_element(
2306 element_type* p,const arrays_type& arrays_,std::size_t& num_destroyed)
2307 {
2308 nosize_transfer_element(
2309 p,hash_for(key_from(*p)),arrays_,num_destroyed,
2310 std::integral_constant<
2311 bool,
2312 std::is_nothrow_move_constructible<init_type>::value||
2313 !std::is_same<element_type,value_type>::value||
2314 !std::is_copy_constructible<element_type>::value>{});
2315 }
2316
2317 void nosize_transfer_element(
2318 element_type* p,std::size_t hash,const arrays_type& arrays_,
2319 std::size_t& num_destroyed,std::true_type )
2320 {
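/* The element is moved into the new arrays and unconditionally destroyed on
 * scope exit, even if the move construction throws; num_destroyed is bumped
 * first so that unchecked_rehash() knows how much to roll back.
 */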
2324 ++num_destroyed;
2325 destroy_element_on_exit d{this,p};
2326 (void)d;
2327 nosize_unchecked_emplace_at(
2328 arrays_,position_for(hash,arrays_),hash,type_policy::move(*p));
2329 }
2330
2331 void nosize_transfer_element(
2332 element_type* p,std::size_t hash,const arrays_type& arrays_,
2333 std::size_t& ,std::false_type )
2334 {
2335 nosize_unchecked_emplace_at(
2336 arrays_,position_for(hash,arrays_),hash,
2337 const_cast<const element_type&>(*p));
2338 }
2339
2340 template<typename... Args>
2341 locator nosize_unchecked_emplace_at(
2342 const arrays_type& arrays_,std::size_t pos0,std::size_t hash,
2343 Args&&... args)
2344 {
2345 for(prober pb(pos0);;pb.next(arrays_.groups_size_mask)){
2346 auto pos=pb.get();
2347 auto pg=arrays_.groups()+pos;
2348 auto mask=pg->match_available();
2349 if(BOOST_LIKELY(mask!=0)){
2350 auto n=unchecked_countr_zero(mask);
2351 auto p=arrays_.elements()+pos*N+n;
2352 construct_element(p,std::forward<Args>(args)...);
2353 pg->set(n,hash);
2354 BOOST_UNORDERED_ADD_STATS(cstats.insertion,(pb.length()));
2355 return {pg,n,p};
2356 }
2357 else pg->mark_overflow(hash);
2358 }
2359 }
2360 };
2361
2362 #if BOOST_WORKAROUND(BOOST_MSVC,<=1900)
2363 #pragma warning(pop)
2364 #endif
2365
2366 #if defined(BOOST_MSVC)
2367 #pragma warning(pop)
2368 #endif
2369
2370 #include <boost/unordered/detail/foa/restore_wshadow.hpp>
2371
2372 } /* namespace foa */
2373 } /* namespace detail */
2374 } /* namespace unordered */
2375 } /* namespace boost */
2376
2377 #undef BOOST_UNORDERED_STATIC_ASSERT_HASH_PRED
2378 #undef BOOST_UNORDERED_HAS_FEATURE
2379 #undef BOOST_UNORDERED_HAS_BUILTIN
2380 #endif