// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An open-addressing hashtable with quadratic probing (a "Swiss table").
// This is the low-level building block underlying absl::flat_hash_set,
// absl::flat_hash_map, absl::node_hash_set, and absl::node_hash_map.
//
// The table stores one byte of metadata (a "control byte") per slot. A
// control byte is either kEmpty, kDeleted, the kSentinel terminator, or, for
// a full slot, 7 bits of the element's hash (H2). Lookups scan the control
// bytes a Group at a time using SIMD or SWAR operations, so most probes
// inspect a whole group of candidate slots with a handful of instructions
// before any slot memory is touched.
#ifndef ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_
#define ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_

#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <initializer_list>
#include <iterator>
#include <limits>
#include <memory>
#include <tuple>
#include <type_traits>
#include <utility>

#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/endian.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/base/macros.h"
#include "absl/base/optimization.h"
#include "absl/base/options.h"
#include "absl/base/port.h"
#include "absl/base/prefetch.h"
#include "absl/container/internal/common.h"  // IWYU pragma: export  // for node_handle
#include "absl/container/internal/compressed_tuple.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/hash_policy_traits.h"
#include "absl/container/internal/hashtable_debug_hooks.h"
#include "absl/container/internal/hashtablez_sampler.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/numeric/bits.h"
#include "absl/utility/utility.h"

#ifdef ABSL_INTERNAL_HAVE_SSE2
#include <emmintrin.h>
#endif

#ifdef ABSL_INTERNAL_HAVE_SSSE3
#include <tmmintrin.h>
#endif

#ifdef _MSC_VER
#include <intrin.h>
#endif

#ifdef ABSL_INTERNAL_HAVE_ARM_NEON
#include <arm_neon.h>
#endif

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {

#ifdef ABSL_SWISSTABLE_ENABLE_GENERATIONS
#error ABSL_SWISSTABLE_ENABLE_GENERATIONS cannot be directly set
#elif (defined(ABSL_HAVE_ADDRESS_SANITIZER) ||   \
       defined(ABSL_HAVE_HWADDRESS_SANITIZER) || \
       defined(ABSL_HAVE_MEMORY_SANITIZER)) &&   \
    !defined(NDEBUG_SANITIZER)
// When compiled in sanitizer mode, we add generation integers to the backing
// array and iterators. In the backing array, the generation is stored between
// the control bytes and the slots. When iterators are dereferenced, we assert
// that the container has not been mutated in a way that could cause iterator
// invalidation since the iterator was initialized.
#define ABSL_SWISSTABLE_ENABLE_GENERATIONS
#endif

// The type of the generation counter used to validate iterators.
using GenerationType = uint8_t;

// A sentinel value for empty generations. Using 0 makes it easy to constexpr
// initialize an array of this value.
constexpr GenerationType SentinelEmptyGeneration() { return 0; }

constexpr GenerationType NextGeneration(GenerationType generation) {
  return ++generation == SentinelEmptyGeneration() ? ++generation : generation;
}
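
// Illustrative wraparound behavior: the counter increments but skips the
// sentinel value 0, so a uint8_t generation cycles 1, 2, ..., 255, 1, 2, ...
// For example:
//
//   static_assert(NextGeneration(255) == 1, "skips the empty sentinel");
//   static_assert(NextGeneration(1) == 2, "ordinary increment");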

#ifdef ABSL_SWISSTABLE_ENABLE_GENERATIONS
constexpr bool SwisstableGenerationsEnabled() { return true; }
constexpr size_t NumGenerationBytes() { return sizeof(GenerationType); }
#else
constexpr bool SwisstableGenerationsEnabled() { return false; }
constexpr size_t NumGenerationBytes() { return 0; }
#endif

template <typename AllocType>
void SwapAlloc(AllocType& lhs, AllocType& rhs,
               std::true_type /* propagate_on_container_swap */) {
  using std::swap;
  swap(lhs, rhs);
}
template <typename AllocType>
void SwapAlloc(AllocType& lhs, AllocType& rhs,
               std::false_type /* propagate_on_container_swap */) {
  (void)lhs;
  (void)rhs;
  assert(lhs == rhs &&
         "It's UB to call swap with unequal non-propagating allocators.");
}

template <typename AllocType>
void CopyAlloc(AllocType& lhs, AllocType& rhs,
               std::true_type /* propagate_alloc */) {
  lhs = rhs;
}
template <typename AllocType>
void CopyAlloc(AllocType&, AllocType&, std::false_type /* propagate_alloc */) {}

// The state for a probe sequence.
//
// Currently, the sequence is a triangular progression of the form
//
//   p(i) := Width * (i^2 + i)/2 + hash (mod mask + 1)
//
// The use of `Width` ensures that each probe step does not overlap groups;
// the sequence effectively outputs the addresses of *groups* (although not
// necessarily aligned to any boundary). The `Group` machinery allows us
// to check an entire group with minimal branching.
//
// Wrapping around at `mask + 1` is important: the first few control bytes
// are mirrored at the end of the array, and while `Group` may select
// candidates from those cloned bytes, there are no corresponding slots
// there, so the probe offsets must be treated as wrapping around.
//
// This probe sequence visits every group exactly once when the number of
// groups is a power of two, since (i^2 + i)/2 is a bijection in Z/(2^m).
// See https://en.wikipedia.org/wiki/Quadratic_probing
template <size_t Width>
class probe_seq {
 public:
  // Creates a new probe sequence using `hash` as the initial value of the
  // sequence and `mask` (usually the capacity of the table) as the mask to
  // apply to each value in the progression.
  probe_seq(size_t hash, size_t mask) {
    assert(((mask + 1) & mask) == 0 && "not a mask");
    mask_ = mask;
    offset_ = hash & mask_;
  }

  // The offset within the table, i.e., the value `p(i)` above.
  size_t offset() const { return offset_; }
  size_t offset(size_t i) const { return (offset_ + i) & mask_; }

  void next() {
    index_ += Width;
    offset_ += index_;
    offset_ &= mask_;
  }

  // The number of slots that have been probed so far.
  size_t index() const { return index_; }

 private:
  size_t mask_;
  size_t offset_;
  size_t index_ = 0;
};
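
// A worked example (illustrative only): with Width == 16 and mask == 63,
// starting from hash 0, the sequence of offsets is
//
//   probe_seq<16> seq(/*hash=*/0, /*mask=*/63);
//   seq.offset();  // 0
//   seq.next();    // offset 16  (0 + 16)
//   seq.next();    // offset 48  (16 + 32)
//   seq.next();    // offset 32  (48 + 48, mod 64)
//
// which visits each of the four 16-byte groups in the table exactly once.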

template <class ContainerKey, class Hash, class Eq>
struct RequireUsableKey {
  template <class PassedKey, class... Args>
  std::pair<
      decltype(std::declval<const Hash&>()(std::declval<const PassedKey&>())),
      decltype(std::declval<const Eq&>()(std::declval<const ContainerKey&>(),
                                         std::declval<const PassedKey&>()))>*
  operator()(const PassedKey&, const Args&...) const;
};

template <class E, class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable : std::false_type {};

template <class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable<
    absl::void_t<decltype(Policy::apply(
        RequireUsableKey<typename Policy::key_type, Hash, Eq>(),
        std::declval<Ts>()...))>,
    Policy, Hash, Eq, Ts...> : std::true_type {};

// TODO(alkis): Switch to std::is_nothrow_swappable when gcc/clang supports it.
template <class T>
constexpr bool IsNoThrowSwappable(std::true_type = {} /* is_swappable */) {
  using std::swap;
  return noexcept(swap(std::declval<T&>(), std::declval<T&>()));
}
template <class T>
constexpr bool IsNoThrowSwappable(std::false_type /* is_swappable */) {
  return false;
}

template <typename T>
uint32_t TrailingZeros(T x) {
  ABSL_ASSUME(x != 0);
  return static_cast<uint32_t>(countr_zero(x));
}

// 8 bytes with the most significant bit of each byte set.
constexpr uint64_t kMsbs8Bytes = 0x8080808080808080ULL;

// An abstract bitmask, such as that emitted by a SIMD instruction.
//
// Specifically, this type implements a simple bitset whose representation is
// controlled by `SignificantBits` and `Shift`. `SignificantBits` is the
// number of abstract bits in the bitset, while `Shift` is the log-base-two of
// the width of an abstract bit in the representation. This mask provides
// operations regardless of how many real bits are set within each abstract
// bit; iteration (see `BitMask` below) additionally requires that at most the
// most significant real bit of each abstract bit is set.
template <class T, int SignificantBits, int Shift = 0>
class NonIterableBitMask {
 public:
  explicit NonIterableBitMask(T mask) : mask_(mask) {}

  explicit operator bool() const { return this->mask_ != 0; }

  // Returns the index of the lowest *abstract* bit set in `self`.
  uint32_t LowestBitSet() const {
    return container_internal::TrailingZeros(mask_) >> Shift;
  }

  // Returns the index of the highest *abstract* bit set in `self`.
  uint32_t HighestBitSet() const {
    return static_cast<uint32_t>((bit_width(mask_) - 1) >> Shift);
  }

  // Returns the number of trailing zero *abstract* bits.
  uint32_t TrailingZeros() const {
    return container_internal::TrailingZeros(mask_) >> Shift;
  }

  // Returns the number of leading zero *abstract* bits.
  uint32_t LeadingZeros() const {
    constexpr int total_significant_bits = SignificantBits << Shift;
    constexpr int extra_bits = sizeof(T) * 8 - total_significant_bits;
    return static_cast<uint32_t>(
               countl_zero(static_cast<T>(mask_ << extra_bits))) >>
           Shift;
  }

  T mask_;
};

// A mask that can be iterated.
//
// For example, when `SignificantBits` is 16 and `Shift` is zero, this is just
// an ordinary 16-bit bitset occupying the low 16 bits of `mask`. When
// `SignificantBits` is 8 and `Shift` is 3, abstract bits are represented as
// the bytes `0x00` and `0x80`, and the mask occupies all 64 bits.
// If `NullifyBitsOnIteration` is true (only allowed when `Shift` is 3), a
// nonzero abstract bit may have additional bits set (e.g. `0xff`, `0x83`, and
// `0x9c` are acceptable, but `0x6f` is not).
template <class T, int SignificantBits, int Shift = 0,
          bool NullifyBitsOnIteration = false>
class BitMask : public NonIterableBitMask<T, SignificantBits, Shift> {
  using Base = NonIterableBitMask<T, SignificantBits, Shift>;
  static_assert(std::is_unsigned<T>::value, "");
  static_assert(Shift == 0 || Shift == 3, "");
  static_assert(!NullifyBitsOnIteration || Shift == 3, "");

 public:
  explicit BitMask(T mask) : Base(mask) {
    if (Shift == 3 && !NullifyBitsOnIteration) {
      assert(this->mask_ == (this->mask_ & kMsbs8Bytes));
    }
  }
  // BitMask is an iterator over the indices of its abstract bits.
  using value_type = int;
  using iterator = BitMask;
  using const_iterator = BitMask;

  BitMask& operator++() {
    if (Shift == 3 && NullifyBitsOnIteration) {
      this->mask_ &= kMsbs8Bytes;
    }
    this->mask_ &= (this->mask_ - 1);
    return *this;
  }

  uint32_t operator*() const { return Base::LowestBitSet(); }

  BitMask begin() const { return *this; }
  BitMask end() const { return BitMask(0); }

 private:
  friend bool operator==(const BitMask& a, const BitMask& b) {
    return a.mask_ == b.mask_;
  }
  friend bool operator!=(const BitMask& a, const BitMask& b) {
    return a.mask_ != b.mask_;
  }
};
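
// Illustrative usage: iterating a BitMask yields the indices of its set
// abstract bits in ascending order, which is how callers walk the candidate
// slots within a group:
//
//   BitMask<uint16_t, 16> m(0b1010);
//   for (uint32_t i : m) {
//     // Visits i == 1, then i == 3.
//   }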

using h2_t = uint8_t;

// A `ctrl_t` is a single control byte, which can have one of four states:
// empty, deleted, full (which has an associated seven-bit h2_t value), and
// the sentinel. They have the following bit patterns:
//
//      empty: 1 0 0 0 0 0 0 0
//    deleted: 1 1 1 1 1 1 1 0
//       full: 0 h h h h h h h  // h represents the hash bits.
//   sentinel: 1 1 1 1 1 1 1 1
//
// These values are specifically tuned for SSE-flavored SIMD; the
// static_asserts below detail the source of these choices.
enum class ctrl_t : int8_t {
  kEmpty = -128,   // 0b10000000
  kDeleted = -2,   // 0b11111110
  kSentinel = -1,  // 0b11111111
};
static_assert(
    (static_cast<int8_t>(ctrl_t::kEmpty) &
     static_cast<int8_t>(ctrl_t::kDeleted) &
     static_cast<int8_t>(ctrl_t::kSentinel) & 0x80) != 0,
    "Special markers need to have the MSB to make checking for them efficient");
static_assert(
    ctrl_t::kEmpty < ctrl_t::kSentinel && ctrl_t::kDeleted < ctrl_t::kSentinel,
    "ctrl_t::kEmpty and ctrl_t::kDeleted must be smaller than "
    "ctrl_t::kSentinel to make the SIMD test of IsEmptyOrDeleted() efficient");
static_assert(
    ctrl_t::kSentinel == static_cast<ctrl_t>(-1),
    "ctrl_t::kSentinel must be -1 to elide loading it from memory into SIMD "
    "registers (pcmpeqd xmm, xmm)");
static_assert(ctrl_t::kEmpty == static_cast<ctrl_t>(-128),
              "ctrl_t::kEmpty must be -128 to make the SIMD check for its "
              "existence efficient (psignb xmm, xmm)");
static_assert(
    (~static_cast<int8_t>(ctrl_t::kEmpty) &
     ~static_cast<int8_t>(ctrl_t::kDeleted) &
     static_cast<int8_t>(ctrl_t::kSentinel) & 0x7F) != 0,
    "ctrl_t::kEmpty and ctrl_t::kDeleted must share an unset bit that is not "
    "shared by ctrl_t::kSentinel to make the scalar test for "
    "MaskEmptyOrDeleted() efficient");
static_assert(ctrl_t::kDeleted == static_cast<ctrl_t>(-2),
              "ctrl_t::kDeleted must be -2 to make the implementation of "
              "ConvertSpecialToEmptyAndFullToDeleted efficient");

// See definition comment for why this is size 32.
ABSL_DLL extern const ctrl_t kEmptyGroup[32];

// Returns a pointer to a control byte group that can be used by empty tables.
inline ctrl_t* EmptyGroup() {
  // Const must be cast away here; no uses of this function will actually write
  // to it because it is only used for empty tables.
  return const_cast<ctrl_t*>(kEmptyGroup + 16);
}

// For use in SOO iterators.
// TODO(b/289225379): we could potentially get rid of this by adding an is_soo
// bit in iterators. This would add branches but reduce cache misses.
ABSL_DLL extern const ctrl_t kSooControl[17];

// Returns a pointer to the control bytes used by SOO iterators.
inline ctrl_t* SooControl() {
  // Const must be cast away here; no uses of this function will actually write
  // to it because it is only used for SOO iterators.
  return const_cast<ctrl_t*>(kSooControl);
}
// Whether `ctrl` points into the SooControl array.
inline bool IsSooControl(const ctrl_t* ctrl) { return ctrl == SooControl(); }

// Returns a pointer to a generation to use for an empty hashtable.
GenerationType* EmptyGeneration();

// Returns whether `generation` is a generation for an empty hashtable that
// could be returned by EmptyGeneration().
inline bool IsEmptyGeneration(const GenerationType* generation) {
  return *generation == SentinelEmptyGeneration();
}

// Mixes a randomly generated per-process seed with `hash` and `ctrl` to
// randomize insertion order within groups.
bool ShouldInsertBackwardsForDebug(size_t capacity, size_t hash,
                                   const ctrl_t* ctrl);

ABSL_ATTRIBUTE_ALWAYS_INLINE inline bool ShouldInsertBackwards(
    ABSL_ATTRIBUTE_UNUSED size_t capacity, ABSL_ATTRIBUTE_UNUSED size_t hash,
    ABSL_ATTRIBUTE_UNUSED const ctrl_t* ctrl) {
#if defined(NDEBUG)
  return false;
#else
  return ShouldInsertBackwardsForDebug(capacity, hash, ctrl);
#endif
}

// Returns the insert position within a group, given the mask of empty or
// deleted slots. In debug builds, the front/back choice is randomized so that
// code cannot accidentally depend on insertion order; in optimized builds we
// always take the lowest candidate.
template <class Mask>
ABSL_ATTRIBUTE_ALWAYS_INLINE inline auto GetInsertionOffset(
    Mask mask, ABSL_ATTRIBUTE_UNUSED size_t capacity,
    ABSL_ATTRIBUTE_UNUSED size_t hash,
    ABSL_ATTRIBUTE_UNUSED const ctrl_t* ctrl) {
#if defined(NDEBUG)
  return mask.LowestBitSet();
#else
  return ShouldInsertBackwardsForDebug(capacity, hash, ctrl)
             ? mask.HighestBitSet()
             : mask.LowestBitSet();
#endif
}

// Returns a per-table hash salt, which changes on resize. This gets mixed into
// H1 to randomize iteration order per-table.
//
// The seed is derived from the control pointer, which adds enough entropy to
// ensure non-determinism of iteration order in most cases.
inline size_t PerTableSalt(const ctrl_t* ctrl) {
  // The low bits of the pointer have little or no entropy because of
  // alignment. We shift the pointer to try to use higher entropy bits. A
  // good number seems to be 12 bits, because that aligns with page size.
  return reinterpret_cast<uintptr_t>(ctrl) >> 12;
}
// Extracts the H1 portion of a hash: the high 57 bits, mixed with a per-table
// salt.
inline size_t H1(size_t hash, const ctrl_t* ctrl) {
  return (hash >> 7) ^ PerTableSalt(ctrl);
}

// Extracts the H2 portion of a hash: the low 7 bits, which are stored in the
// control byte of a full slot.
inline h2_t H2(size_t hash) { return hash & 0x7F; }
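
// Putting H1/H2 together (an illustrative sketch; the salt depends on the
// table's control pointer, so H1 is table-specific): a hash is split into
// 57 bits of probe position and 7 bits of per-slot metadata:
//
//   size_t hash = 0x12345678;
//   h2_t h2 = H2(hash);  // 0x78 & 0x7F == 0x78, stored in the control byte.
//   // H1(hash, ctrl) == (hash >> 7) ^ PerTableSalt(ctrl) seeds the probe
//   // sequence over groups.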

// Helpers for checking the state of a control byte.
inline bool IsEmpty(ctrl_t c) { return c == ctrl_t::kEmpty; }
inline bool IsFull(ctrl_t c) {
  // Cast `c` to the underlying type instead of casting `0` to `ctrl_t` as `0`
  // is not a value in the enum. Both ways are equivalent, but this way makes
  // linters happier.
  return static_cast<std::underlying_type_t<ctrl_t>>(c) >= 0;
}
inline bool IsDeleted(ctrl_t c) { return c == ctrl_t::kDeleted; }
inline bool IsEmptyOrDeleted(ctrl_t c) { return c < ctrl_t::kSentinel; }

#ifdef ABSL_INTERNAL_HAVE_SSE2
// Quick reference guide for intrinsics used below:
//
// * __m128i: An XMM (128-bit) word.
//
// * _mm_setzero_si128: Returns a zero vector.
// * _mm_set1_epi8:     Returns a vector with the same i8 in each lane.
//
// * _mm_subs_epi8:    Saturating-subtracts two i8 vectors.
// * _mm_and_si128:    Ands two i128s together.
// * _mm_or_si128:     Ors two i128s together.
// * _mm_andnot_si128: And-nots two i128s together.
//
// * _mm_cmpeq_epi8: Component-wise compares two i8 vectors for equality,
//                   filling each lane with 0x00 or 0xff.
// * _mm_cmpgt_epi8: Same as above, but using > rather than ==.
//
// * _mm_loadu_si128:  Performs an unaligned load of an i128.
// * _mm_storeu_si128: Performs an unaligned store of an i128.
//
// * _mm_sign_epi8:     Retains, negates, or zeroes each i8 lane of the first
//                      argument if the corresponding lane of the second
//                      argument is positive, negative, or zero, respectively.
// * _mm_movemask_epi8: Selects the sign bit out of each i8 lane and produces
//                      a bitmask consisting of those bits.
// * _mm_shuffle_epi8:  Selects i8s from the first argument, using the low
//                      four bits of each i8 lane in the second argument as
//                      indices.

// https://github.com/abseil/abseil-cpp/issues/209
// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=87853
// _mm_cmpgt_epi8 is broken under GCC with -funsigned-char; work around it
// with a saturating-subtract-based comparison in that configuration.
inline __m128i _mm_cmpgt_epi8_fixed(__m128i a, __m128i b) {
#if defined(__GNUC__) && !defined(__clang__)
  if (std::is_unsigned<char>::value) {
    const __m128i mask = _mm_set1_epi8(0x80);
    const __m128i diff = _mm_subs_epi8(b, a);
    return _mm_cmpeq_epi8(_mm_and_si128(diff, mask), mask);
  }
#endif
  return _mm_cmpgt_epi8(a, b);
}

struct GroupSse2Impl {
  static constexpr size_t kWidth = 16;  // the number of slots per group

  explicit GroupSse2Impl(const ctrl_t* pos) {
    ctrl = _mm_loadu_si128(reinterpret_cast<const __m128i*>(pos));
  }

  // Returns a bitmask representing the positions of slots that match hash.
  BitMask<uint16_t, kWidth> Match(h2_t hash) const {
    auto match = _mm_set1_epi8(static_cast<char>(hash));
    BitMask<uint16_t, kWidth> result = BitMask<uint16_t, kWidth>(0);
    result = BitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
    return result;
  }

  // Returns a bitmask representing the positions of empty slots.
  NonIterableBitMask<uint16_t, kWidth> MaskEmpty() const {
#ifdef ABSL_INTERNAL_HAVE_SSSE3
    // This only works because ctrl_t::kEmpty is -128.
    return NonIterableBitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl))));
#else
    auto match = _mm_set1_epi8(static_cast<char>(ctrl_t::kEmpty));
    return NonIterableBitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
#endif
  }

  // Returns a bitmask representing the positions of full slots.
  // Note: for `is_small()` tables a group may contain the "same" slot twice:
  // original and mirrored.
  BitMask<uint16_t, kWidth> MaskFull() const {
    return BitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(ctrl) ^ 0xffff));
  }

  // Returns a bitmask representing the positions of non-full slots.
  // Note: this includes kEmpty, kDeleted, and kSentinel. It is useful in
  // contexts where kSentinel is not present.
  auto MaskNonFull() const {
    return BitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(ctrl)));
  }

  // Returns a bitmask representing the positions of empty or deleted slots.
  NonIterableBitMask<uint16_t, kWidth> MaskEmptyOrDeleted() const {
    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
    return NonIterableBitMask<uint16_t, kWidth>(static_cast<uint16_t>(
        _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl))));
  }

  // Returns the number of leading empty or deleted elements in the group.
  uint32_t CountLeadingEmptyOrDeleted() const {
    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
    return TrailingZeros(static_cast<uint32_t>(
        _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1));
  }

  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    auto msbs = _mm_set1_epi8(static_cast<char>(-128));
    auto x126 = _mm_set1_epi8(126);
#ifdef ABSL_INTERNAL_HAVE_SSSE3
    auto res = _mm_or_si128(_mm_shuffle_epi8(x126, ctrl), msbs);
#else
    auto zero = _mm_setzero_si128();
    auto special_mask = _mm_cmpgt_epi8_fixed(zero, ctrl);
    auto res = _mm_or_si128(msbs, _mm_andnot_si128(special_mask, x126));
#endif
    _mm_storeu_si128(reinterpret_cast<__m128i*>(dst), res);
  }

  __m128i ctrl;
};
#endif  // ABSL_INTERNAL_HAVE_SSE2

#if defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN)
struct GroupAArch64Impl {
  static constexpr size_t kWidth = 8;

  explicit GroupAArch64Impl(const ctrl_t* pos) {
    ctrl = vld1_u8(reinterpret_cast<const uint8_t*>(pos));
  }

  auto Match(h2_t hash) const {
    uint8x8_t dup = vdup_n_u8(hash);
    auto mask = vceq_u8(ctrl, dup);
    return BitMask<uint64_t, kWidth, /*Shift=*/3,
                   /*NullifyBitsOnIteration=*/true>(
        vget_lane_u64(vreinterpret_u64_u8(mask), 0));
  }

  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmpty() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(vceq_s8(
                          vdup_n_s8(static_cast<int8_t>(ctrl_t::kEmpty)),
                          vreinterpret_s8_u8(ctrl))),
                      0);
    return NonIterableBitMask<uint64_t, kWidth, 3>(mask);
  }

  // Returns a bitmask representing the positions of full slots.
  // Note: for `is_small()` tables a group may contain the "same" slot twice:
  // original and mirrored.
  auto MaskFull() const {
    uint64_t mask = vget_lane_u64(
        vreinterpret_u64_u8(vcge_s8(vreinterpret_s8_u8(ctrl),
                                    vdup_n_s8(static_cast<int8_t>(0)))),
        0);
    return BitMask<uint64_t, kWidth, /*Shift=*/3,
                   /*NullifyBitsOnIteration=*/true>(mask);
  }

  // Returns a bitmask representing the positions of non-full slots.
  // Note: this includes kEmpty, kDeleted, and kSentinel. It is useful in
  // contexts where kSentinel is not present.
  auto MaskNonFull() const {
    uint64_t mask = vget_lane_u64(
        vreinterpret_u64_u8(vclt_s8(vreinterpret_s8_u8(ctrl),
                                    vdup_n_s8(static_cast<int8_t>(0)))),
        0);
    return BitMask<uint64_t, kWidth, /*Shift=*/3,
                   /*NullifyBitsOnIteration=*/true>(mask);
  }

  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmptyOrDeleted() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(vcgt_s8(
                          vdup_n_s8(static_cast<int8_t>(ctrl_t::kSentinel)),
                          vreinterpret_s8_u8(ctrl))),
                      0);
    return NonIterableBitMask<uint64_t, kWidth, 3>(mask);
  }

  uint32_t CountLeadingEmptyOrDeleted() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(vcle_s8(
                          vdup_n_s8(static_cast<int8_t>(ctrl_t::kSentinel)),
                          vreinterpret_s8_u8(ctrl))),
                      0);
    // Similar to MaskEmptyOrDeleted() but we invert the logic to invert the
    // produced bitfield. We then count the number of trailing zeros.
    // Clang and GCC optimize countr_zero to rbit+clz without any check for 0,
    // so we should be fine.
    return static_cast<uint32_t>(countr_zero(mask)) >> 3;
  }

  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(ctrl), 0);
    constexpr uint64_t slsbs = 0x0202020202020202ULL;
    constexpr uint64_t midbs = 0x7e7e7e7e7e7e7e7eULL;
    auto x = slsbs & (mask >> 6);
    auto res = (x + midbs) | kMsbs8Bytes;
    little_endian::Store64(dst, res);
  }

  uint8x8_t ctrl;
};
#endif  // ABSL_INTERNAL_HAVE_ARM_NEON && ABSL_IS_LITTLE_ENDIAN

struct GroupPortableImpl {
  static constexpr size_t kWidth = 8;

  explicit GroupPortableImpl(const ctrl_t* pos)
      : ctrl(little_endian::Load64(pos)) {}

  BitMask<uint64_t, kWidth, 3> Match(h2_t hash) const {
    // For the technique, see:
    // http://graphics.stanford.edu/~seander/bithacks.html##ValueInWord
    // (Determine if a word has a byte equal to n).
    //
    // Caveat: there are false positives, but:
    // - they only occur if there is a real match
    // - they never occur on ctrl_t::kEmpty, ctrl_t::kDeleted, ctrl_t::kSentinel
    // - they will be handled gracefully by subsequent checks in code
    //
    // Example:
    //   v = 0x1716151413121110
    //   hash = 0x12
    //   retval = (v - lsbs) & ~v & msbs = 0x0000000080800000
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = ctrl ^ (lsbs * hash);
    return BitMask<uint64_t, kWidth, 3>((x - lsbs) & ~x & kMsbs8Bytes);
  }

  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmpty() const {
    return NonIterableBitMask<uint64_t, kWidth, 3>((ctrl & ~(ctrl << 6)) &
                                                   kMsbs8Bytes);
  }

  // Returns a bitmask representing the positions of full slots.
  // Note: for `is_small()` tables a group may contain the "same" slot twice:
  // original and mirrored.
  BitMask<uint64_t, kWidth, 3> MaskFull() const {
    return BitMask<uint64_t, kWidth, 3>((ctrl ^ kMsbs8Bytes) & kMsbs8Bytes);
  }

  // Returns a bitmask representing the positions of non-full slots.
  // Note: this includes kEmpty, kDeleted, and kSentinel. It is useful in
  // contexts where kSentinel is not present.
  auto MaskNonFull() const {
    return BitMask<uint64_t, kWidth, 3>(ctrl & kMsbs8Bytes);
  }

  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmptyOrDeleted() const {
    return NonIterableBitMask<uint64_t, kWidth, 3>((ctrl & ~(ctrl << 7)) &
                                                   kMsbs8Bytes);
  }

  uint32_t CountLeadingEmptyOrDeleted() const {
    // ctrl | ~(ctrl >> 7) will have the lowest bit set to zero for kEmpty and
    // kDeleted. We lower all other bits and count the number of trailing
    // zeros.
    constexpr uint64_t bits = 0x0101010101010101ULL;
    return static_cast<uint32_t>(countr_zero((ctrl | ~(ctrl >> 7)) & bits) >>
                                 3);
  }

  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = ctrl & kMsbs8Bytes;
    auto res = (~x + (x >> 7)) & ~lsbs;
    little_endian::Store64(dst, res);
  }

  uint64_t ctrl;
};
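
// A sketch of the scalar trick in GroupPortableImpl::MaskEmpty() above, for a
// single control byte b (illustrative):
//
//   kEmpty    = 0b10000000: MSB set, bit 1 clear -> (b & ~(b << 6)) keeps MSB
//   kDeleted  = 0b11111110: MSB set, bit 1 set   -> (b << 6) sets MSB, cleared
//   kSentinel = 0b11111111: likewise cleared
//   full      = 0b0hhhhhhh: MSB clear            -> cleared by & kMsbs8Bytes
//
// so the result has the high bit set only in bytes that are kEmpty.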

#ifdef ABSL_INTERNAL_HAVE_SSE2
using Group = GroupSse2Impl;
using GroupFullEmptyOrDeleted = GroupSse2Impl;
#elif defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN)
using Group = GroupAArch64Impl;
// For Aarch64, we use the portable implementation for counting and masking
// full, empty, or deleted group elements. This avoids the latency of moving
// data between NEON and general-purpose registers when the resulting mask is
// consumed by scalar code anyway.
using GroupFullEmptyOrDeleted = GroupPortableImpl;
#else
using Group = GroupPortableImpl;
using GroupFullEmptyOrDeleted = GroupPortableImpl;
#endif

// When there is an insertion with no reserved growth, we rehash with
// probability `min(1, RehashProbabilityConstant() / capacity())`. Using a
// constant divided by capacity ensures that inserting N elements is still
// O(N) in the average case.
inline size_t RehashProbabilityConstant() { return 16; }

class CommonFieldsGenerationInfoEnabled {
  // A sentinel value for reserved_growth_ indicating that we just ran out of
  // reserved growth on the last insertion. When reserve is called and then
  // insertions take place, reserved_growth_'s state machine is N, ..., 1,
  // kReservedGrowthJustRanOut, 0.
  static constexpr size_t kReservedGrowthJustRanOut =
      (std::numeric_limits<size_t>::max)();

 public:
  CommonFieldsGenerationInfoEnabled() = default;
  CommonFieldsGenerationInfoEnabled(CommonFieldsGenerationInfoEnabled&& that)
      : reserved_growth_(that.reserved_growth_),
        reservation_size_(that.reservation_size_),
        generation_(that.generation_) {
    that.reserved_growth_ = 0;
    that.reservation_size_ = 0;
    that.generation_ = EmptyGeneration();
  }
  CommonFieldsGenerationInfoEnabled& operator=(
      CommonFieldsGenerationInfoEnabled&&) = default;

  // Whether we should rehash on insert in order to detect bugs of using
  // invalid references. We rehash on the first insertion after
  // reserved_growth_ reaches 0 following a call to reserve, and also with low
  // probability whenever reserved_growth_ is zero.
  bool should_rehash_for_bug_detection_on_insert(const ctrl_t* ctrl,
                                                 size_t capacity) const;
  // Similar to above, except that we don't depend on reserved_growth_.
  bool should_rehash_for_bug_detection_on_move(const ctrl_t* ctrl,
                                               size_t capacity) const;
  void maybe_increment_generation_on_insert() {
    if (reserved_growth_ == kReservedGrowthJustRanOut) reserved_growth_ = 0;

    if (reserved_growth_ > 0) {
      if (--reserved_growth_ == 0) reserved_growth_ = kReservedGrowthJustRanOut;
    } else {
      increment_generation();
    }
  }
  void increment_generation() { *generation_ = NextGeneration(*generation_); }
  void reset_reserved_growth(size_t reservation, size_t size) {
    reserved_growth_ = reservation - size;
  }
  size_t reserved_growth() const { return reserved_growth_; }
  void set_reserved_growth(size_t r) { reserved_growth_ = r; }
  size_t reservation_size() const { return reservation_size_; }
  void set_reservation_size(size_t r) { reservation_size_ = r; }
  GenerationType generation() const { return *generation_; }
  void set_generation(GenerationType g) { *generation_ = g; }
  GenerationType* generation_ptr() const { return generation_; }
  void set_generation_ptr(GenerationType* g) { generation_ = g; }

 private:
  // The number of insertions remaining that are guaranteed to not rehash due
  // to a prior call to reserve. Note: we store reserved growth in addition to
  // reservation size because calls to erase() decrease size_ but don't
  // decrease reserved growth.
  size_t reserved_growth_ = 0;
  // The maximum argument to reserve() since the container was cleared. We
  // need to keep track of this, in addition to reserved growth, because we
  // reset reserved growth to this when erase(begin(), end()) is called.
  size_t reservation_size_ = 0;
  // Pointer to the generation counter, which is used to validate iterators
  // and is stored in the backing array between the control bytes and the
  // slots. Note that we can't store the generation inside the container
  // itself and keep a pointer to the container in the iterators because
  // iterators must remain valid when the container is moved.
  GenerationType* generation_ = EmptyGeneration();
};

class CommonFieldsGenerationInfoDisabled {
 public:
  CommonFieldsGenerationInfoDisabled() = default;
  CommonFieldsGenerationInfoDisabled(CommonFieldsGenerationInfoDisabled&&) =
      default;
  CommonFieldsGenerationInfoDisabled& operator=(
      CommonFieldsGenerationInfoDisabled&&) = default;

  bool should_rehash_for_bug_detection_on_insert(const ctrl_t*, size_t) const {
    return false;
  }
  bool should_rehash_for_bug_detection_on_move(const ctrl_t*, size_t) const {
    return false;
  }
  void maybe_increment_generation_on_insert() {}
  void increment_generation() {}
  void reset_reserved_growth(size_t, size_t) {}
  size_t reserved_growth() const { return 0; }
  void set_reserved_growth(size_t) {}
  size_t reservation_size() const { return 0; }
  void set_reservation_size(size_t) {}
  GenerationType generation() const { return 0; }
  void set_generation(GenerationType) {}
  GenerationType* generation_ptr() const { return nullptr; }
  void set_generation_ptr(GenerationType*) {}
};

class HashSetIteratorGenerationInfoEnabled {
 public:
  HashSetIteratorGenerationInfoEnabled() = default;
  explicit HashSetIteratorGenerationInfoEnabled(
      const GenerationType* generation_ptr)
      : generation_ptr_(generation_ptr), generation_(*generation_ptr) {}

  GenerationType generation() const { return generation_; }
  void reset_generation() { generation_ = *generation_ptr_; }
  const GenerationType* generation_ptr() const { return generation_ptr_; }
  void set_generation_ptr(const GenerationType* ptr) { generation_ptr_ = ptr; }

 private:
  const GenerationType* generation_ptr_ = EmptyGeneration();
  GenerationType generation_ = *generation_ptr_;
};

class HashSetIteratorGenerationInfoDisabled {
 public:
  HashSetIteratorGenerationInfoDisabled() = default;
  explicit HashSetIteratorGenerationInfoDisabled(const GenerationType*) {}

  GenerationType generation() const { return 0; }
  void reset_generation() {}
  const GenerationType* generation_ptr() const { return nullptr; }
  void set_generation_ptr(const GenerationType*) {}
};

#ifdef ABSL_SWISSTABLE_ENABLE_GENERATIONS
using CommonFieldsGenerationInfo = CommonFieldsGenerationInfoEnabled;
using HashSetIteratorGenerationInfo = HashSetIteratorGenerationInfoEnabled;
#else
using CommonFieldsGenerationInfo = CommonFieldsGenerationInfoDisabled;
using HashSetIteratorGenerationInfo = HashSetIteratorGenerationInfoDisabled;
#endif

// Stores the information regarding the number of slots we can still fill
// without needing to rehash.
//
// We want to ensure a sufficient number of empty slots in the table in order
// to keep probe sequences relatively short. An empty slot in the probe group
// is required to stop probing.
//
// Tombstones (kDeleted slots) are not included in growth capacity because
// we'd like to rehash when the table is filled with tombstones and/or full
// slots.
//
// GrowthInfo also stores a bit that encodes whether the table may have any
// deleted slots. Most tables (>95%) have no deleted slots, so some functions
// can be more efficient with this information.
class GrowthInfo {
 public:
  // Leaves the data member uninitialized.
  GrowthInfo() = default;

  // Initializes the GrowthInfo assuming we can grow `growth_left` elements
  // and there are no kDeleted slots in the table.
  void InitGrowthLeftNoDeleted(size_t growth_left) {
    growth_left_info_ = growth_left;
  }

  // Overwrites a single full slot with an empty slot.
  void OverwriteFullAsEmpty() { ++growth_left_info_; }

  // Overwrites a single empty slot with a full slot.
  void OverwriteEmptyAsFull() {
    assert(GetGrowthLeft() > 0);
    --growth_left_info_;
  }

  // Overwrites several empty slots with full slots.
  void OverwriteManyEmptyAsFull(size_t cnt) {
    assert(GetGrowthLeft() >= cnt);
    growth_left_info_ -= cnt;
  }

  // Overwrites the specified control element with a full slot.
  void OverwriteControlAsFull(ctrl_t ctrl) {
    assert(GetGrowthLeft() >= static_cast<size_t>(IsEmpty(ctrl)));
    growth_left_info_ -= static_cast<size_t>(IsEmpty(ctrl));
  }

  // Overwrites a single full slot with a deleted slot.
  void OverwriteFullAsDeleted() { growth_left_info_ |= kDeletedBit; }

  // Returns true if the table satisfies two properties:
  // 1. Guaranteed to have no kDeleted slots.
  // 2. There is a place for at least one element to grow.
  bool HasNoDeletedAndGrowthLeft() const {
    return static_cast<std::make_signed_t<size_t>>(growth_left_info_) > 0;
  }

  // Returns true if the table satisfies two properties:
  // 1. Guaranteed to have no kDeleted slots.
  // 2. There is no growth left.
  bool HasNoGrowthLeftAndNoDeleted() const { return growth_left_info_ == 0; }

  // Returns true if the table is guaranteed to have no kDeleted slots.
  bool HasNoDeleted() const {
    return static_cast<std::make_signed_t<size_t>>(growth_left_info_) >= 0;
  }

  // Returns the number of elements left to grow.
  size_t GetGrowthLeft() const { return growth_left_info_ & kGrowthLeftMask; }

 private:
  static constexpr size_t kGrowthLeftMask = ((~size_t{}) >> 1);
  static constexpr size_t kDeletedBit = ~kGrowthLeftMask;
  // The topmost bit signals whether there may be deleted slots.
  size_t growth_left_info_;
};
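
// Illustration of the encoding (for a 64-bit size_t): the top bit records
// "may contain deleted slots" and the remaining 63 bits count growth left.
//
//   GrowthInfo gi;
//   gi.InitGrowthLeftNoDeleted(5);  // growth_left_info_ == 0x0000...0005
//   gi.OverwriteFullAsDeleted();    // growth_left_info_ == 0x8000...0005
//   // Now GetGrowthLeft() == 5, HasNoDeleted() == false, and
//   // HasNoDeletedAndGrowthLeft() == false.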

static_assert(sizeof(GrowthInfo) == sizeof(size_t), "");
static_assert(alignof(GrowthInfo) == alignof(size_t), "");

// Returns whether `n` is a valid capacity (i.e., number of slots).
//
// A valid capacity is a non-zero integer `2^m - 1`.
inline bool IsValidCapacity(size_t n) { return ((n + 1) & n) == 0 && n > 0; }

// Returns the number of "cloned control bytes".
//
// This is the number of control bytes that are present both at the beginning
// of the control byte array and at the end, such that we can create a
// `Group::kWidth`-width probe window starting from any control byte.
constexpr size_t NumClonedBytes() { return Group::kWidth - 1; }

// Returns the number of control bytes, including cloned bytes.
constexpr size_t NumControlBytes(size_t capacity) {
  return capacity + 1 + NumClonedBytes();
}

// Computes the offset from the start of the backing allocation of control
// bytes. infoz and growth_info are stored at the beginning of the backing
// array.
inline static size_t ControlOffset(bool has_infoz) {
  return (has_infoz ? sizeof(HashtablezInfoHandle) : 0) + sizeof(GrowthInfo);
}

// Helper class for computing offsets and the allocation size of hash set
// fields.
class RawHashSetLayout {
 public:
  explicit RawHashSetLayout(size_t capacity, size_t slot_align, bool has_infoz)
      : capacity_(capacity),
        control_offset_(ControlOffset(has_infoz)),
        generation_offset_(control_offset_ + NumControlBytes(capacity)),
        slot_offset_(
            (generation_offset_ + NumGenerationBytes() + slot_align - 1) &
            (~slot_align + 1)) {
    assert(IsValidCapacity(capacity));
  }

  // Returns the capacity of the table.
  size_t capacity() const { return capacity_; }

  // Returns the precomputed offset from the start of the backing allocation
  // of the control bytes.
  size_t control_offset() const { return control_offset_; }

  // Returns the offset (from the start of the backing allocation) of the
  // generation counter (if it exists).
  size_t generation_offset() const { return generation_offset_; }

  // Returns the offset (from the start of the backing allocation) at which
  // the slots begin.
  size_t slot_offset() const { return slot_offset_; }

  // Returns the total size of the backing array.
  size_t alloc_size(size_t slot_size) const {
    return slot_offset_ + capacity_ * slot_size;
  }

 private:
  size_t capacity_;
  size_t control_offset_;
  size_t generation_offset_;
  size_t slot_offset_;
};
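
// The resulting backing array layout, in allocation order (a sketch; which
// pieces exist depends on sampling and generation configuration):
//
//   [HashtablezInfoHandle (only if sampled)]
//   [GrowthInfo]
//   [capacity + 1 + NumClonedBytes() control bytes]  <- control_offset()
//   [GenerationType (only if generations enabled)]   <- generation_offset()
//   [padding to slot alignment]
//   [capacity slots]                                 <- slot_offset()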

struct HashtableFreeFunctionsAccess;

// We only allow a maximum of 1 SOO element, which makes the implementation
// much simpler. Complications with multiple SOO elements include:
// - Satisfying the guarantee that erasing one element doesn't invalidate
//   iterators to other elements means we would probably need actual SOO
//   control bytes.
// - In order to prevent user code from depending on iteration order for small
//   tables, we would need to randomize the iteration order somehow.
constexpr size_t SooCapacity() { return 1; }
// Sentinel type to indicate SOO CommonFields construction.
struct soo_tag_t {};
// Sentinel type to indicate SOO CommonFields construction with full size.
struct full_soo_tag_t {};

// Suppress erroneous uninitialized-memory errors on GCC. For example, GCC
// may think that reads through the pointers below touch uninitialized memory
// even though they are only performed when the memory is known to be
// initialized.
#if !defined(__clang__) && defined(__GNUC__)
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(x)                    \
  _Pragma("GCC diagnostic push")                                   \
      _Pragma("GCC diagnostic ignored \"-Wmaybe-uninitialized\"")  \
          _Pragma("GCC diagnostic ignored \"-Wuninitialized\"") x; \
  _Pragma("GCC diagnostic pop")
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(x) \
  ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(return x)
#else
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(x) x
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(x) return x
#endif

// A pointer that may be read while uninitialized, used to work around
// uninitialized-memory warnings (e.g. when constructing begin() iterators in
// empty hashtables).
union MaybeInitializedPtr {
  void* get() const { ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(p); }
  void set(void* ptr) { p = ptr; }

  void* p;
};

struct HeapPtrs {
  HeapPtrs() = default;
  explicit HeapPtrs(ctrl_t* c) : control(c) {}

  // The control bytes (and, also, a pointer near to the base of the backing
  // array).
  //
  // This contains `capacity + 1 + NumClonedBytes()` entries, even when the
  // table is empty (hence EmptyGroup).
  //
  // Note that growth_info is stored immediately before this pointer.
  // May be uninitialized for SOO tables.
  ctrl_t* control;

  // The beginning of the slots, located at `slot_offset()` bytes after the
  // start of the backing array. May be uninitialized for small tables.
  MaybeInitializedPtr slot_array;
};

// Manages the backing array pointers or the SOO slot. When the table is in
// SOO mode, the inline element lives in `soo_data`; otherwise `heap` is used.
union HeapOrSoo {
  HeapOrSoo() = default;
  explicit HeapOrSoo(ctrl_t* c) : heap(c) {}

  ctrl_t*& control() {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.control);
  }
  ctrl_t* control() const {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.control);
  }
  MaybeInitializedPtr& slot_array() {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.slot_array);
  }
  MaybeInitializedPtr slot_array() const {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.slot_array);
  }
  void* get_soo_data() {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(soo_data);
  }
  const void* get_soo_data() const {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(soo_data);
  }

  HeapPtrs heap;
  unsigned char soo_data[sizeof(HeapPtrs)];
};

// CommonFields hold the fields in raw_hash_set that do not depend on template
// parameters. This allows us to conveniently pass all of this state to helper
// functions as a single argument.
class CommonFields : public CommonFieldsGenerationInfo {
 public:
  CommonFields() : capacity_(0), size_(0), heap_or_soo_(EmptyGroup()) {}
  explicit CommonFields(soo_tag_t) : capacity_(SooCapacity()), size_(0) {}
  explicit CommonFields(full_soo_tag_t)
      : capacity_(SooCapacity()), size_(size_t{1} << HasInfozShift()) {}

  // Not copyable.
  CommonFields(const CommonFields&) = delete;
  CommonFields& operator=(const CommonFields&) = delete;

  // Movable.
  CommonFields(CommonFields&& that) = default;
  CommonFields& operator=(CommonFields&&) = default;

  template <bool kSooEnabled>
  static CommonFields CreateDefault() {
    return kSooEnabled ? CommonFields{soo_tag_t{}} : CommonFields{};
  }

  // The inline data for SOO is written on top of control_/slots_.
  const void* soo_data() const { return heap_or_soo_.get_soo_data(); }
  void* soo_data() { return heap_or_soo_.get_soo_data(); }

  HeapOrSoo heap_or_soo() const { return heap_or_soo_; }
  const HeapOrSoo& heap_or_soo_ref() const { return heap_or_soo_; }

  ctrl_t* control() const { return heap_or_soo_.control(); }
  void set_control(ctrl_t* c) { heap_or_soo_.control() = c; }
  void* backing_array_start() const {
    // growth_info (and maybe infoz) is stored before control bytes.
    assert(reinterpret_cast<uintptr_t>(control()) % alignof(size_t) == 0);
    return control() - ControlOffset(has_infoz());
  }

  // Note: we can't use slots() because Qt defines "slots" as a macro.
  void* slot_array() const { return heap_or_soo_.slot_array().get(); }
  MaybeInitializedPtr slots_union() const { return heap_or_soo_.slot_array(); }
  void set_slots(void* s) { heap_or_soo_.slot_array().set(s); }

  // The number of filled slots.
  size_t size() const { return size_ >> HasInfozShift(); }
  void set_size(size_t s) {
    size_ = (s << HasInfozShift()) | (size_ & HasInfozMask());
  }
  void set_empty_soo() {
    AssertInSooMode();
    size_ = 0;
  }
  void set_full_soo() {
    AssertInSooMode();
    size_ = size_t{1} << HasInfozShift();
  }
  void increment_size() {
    assert(size() < capacity());
    size_ += size_t{1} << HasInfozShift();
  }
  void decrement_size() {
    assert(size() > 0);
    size_ -= size_t{1} << HasInfozShift();
  }

  // The total number of available slots.
  size_t capacity() const { return capacity_; }
  void set_capacity(size_t c) {
    assert(c == 0 || IsValidCapacity(c));
    capacity_ = c;
  }

  // The number of slots we can still fill without needing to rehash.
  // This is stored in the heap allocation before the control bytes.
  size_t growth_left() const { return growth_info().GetGrowthLeft(); }

  GrowthInfo& growth_info() {
    auto* gl_ptr = reinterpret_cast<GrowthInfo*>(control()) - 1;
    assert(reinterpret_cast<uintptr_t>(gl_ptr) % alignof(GrowthInfo) == 0);
    return *gl_ptr;
  }
  GrowthInfo growth_info() const {
    return const_cast<CommonFields*>(this)->growth_info();
  }

  bool has_infoz() const {
    return ABSL_PREDICT_FALSE((size_ & HasInfozMask()) != 0);
  }
  void set_has_infoz(bool has_infoz) {
    size_ = (size() << HasInfozShift()) | static_cast<size_t>(has_infoz);
  }

  HashtablezInfoHandle infoz() {
    return has_infoz()
               ? *reinterpret_cast<HashtablezInfoHandle*>(backing_array_start())
               : HashtablezInfoHandle();
  }
  void set_infoz(HashtablezInfoHandle infoz) {
    assert(has_infoz());
    *reinterpret_cast<HashtablezInfoHandle*>(backing_array_start()) = infoz;
  }

  bool should_rehash_for_bug_detection_on_insert() const {
    return CommonFieldsGenerationInfo::
        should_rehash_for_bug_detection_on_insert(control(), capacity());
  }
  bool should_rehash_for_bug_detection_on_move() const {
    return CommonFieldsGenerationInfo::should_rehash_for_bug_detection_on_move(
        control(), capacity());
  }
  void reset_reserved_growth(size_t reservation) {
    CommonFieldsGenerationInfo::reset_reserved_growth(reservation, size());
  }

  // The size of the backing array allocation.
  size_t alloc_size(size_t slot_size, size_t slot_align) const {
    return RawHashSetLayout(capacity(), slot_align, has_infoz())
        .alloc_size(slot_size);
  }

  // Moves the fields other than heap_or_soo_.
  void move_non_heap_or_soo_fields(CommonFields& that) {
    static_cast<CommonFieldsGenerationInfo&>(*this) =
        std::move(static_cast<CommonFieldsGenerationInfo&>(that));
    capacity_ = that.capacity_;
    size_ = that.size_;
  }

  // Returns the number of control bytes set to kDeleted. For testing only.
  size_t TombstonesCount() const {
    return static_cast<size_t>(
        std::count(control(), control() + capacity(), ctrl_t::kDeleted));
  }

 private:
  // We store the has_infoz bit in the lowest bit of size_.
  static constexpr size_t HasInfozShift() { return 1; }
  static constexpr size_t HasInfozMask() {
    return (size_t{1} << HasInfozShift()) - 1;
  }

  // We can't assert that SOO is enabled because we don't have SooEnabled()
  // here, but we assert what we can.
  void AssertInSooMode() const {
    assert(capacity() == SooCapacity());
    assert(!has_infoz());
  }

  // The number of slots in the backing array. This is always 2^N-1 for an
  // integer N.
  size_t capacity_;

  // The number of full slots, shifted left by HasInfozShift(); the low bit
  // records whether this table has a sampled hashtablez info handle.
  size_t size_;

  // Either the control/slots pointers or the SOO slot.
  HeapOrSoo heap_or_soo_;
};

template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_set;

// Returns the next valid capacity after `n`.
inline size_t NextCapacity(size_t n) {
  assert(IsValidCapacity(n) || n == 0);
  return n * 2 + 1;
}

// Applies the following mapping to every byte in the control array:
//   * kDeleted -> kEmpty
//   * kEmpty -> kEmpty
//   * _ -> kDeleted
// PRECONDITION:
//   IsValidCapacity(capacity)
//   ctrl[capacity] == ctrl_t::kSentinel
//   ctrl[i] != ctrl_t::kSentinel for all i < capacity
void ConvertDeletedToEmptyAndFullToDeleted(ctrl_t* ctrl, size_t capacity);

// Converts `n` into the next valid capacity, per `IsValidCapacity`.
inline size_t NormalizeCapacity(size_t n) {
  return n ? ~size_t{} >> countl_zero(n) : 1;
}
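
// For example (illustrative): NormalizeCapacity maps a requested count to the
// next value of the form 2^m - 1:
//
//   NormalizeCapacity(0) == 1
//   NormalizeCapacity(1) == 1
//   NormalizeCapacity(2) == 3
//   NormalizeCapacity(5) == 7
//   NormalizeCapacity(8) == 15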

// General notes on capacity/growth methods below:
// - We use 7/8th as the maximum load factor. For 16-wide groups, that gives
//   an average of two empty slots per group.
// - For (capacity + 1) >= Group::kWidth, growth is 7/8 * capacity.
// - For (capacity + 1) < Group::kWidth, growth == capacity. In this case, we
//   never need to probe (the whole table fits in one group), so we don't need
//   a load factor less than 1.

// Given `capacity`, applies the load factor; i.e., it returns the maximum
// number of values we should put into the table before a resizing rehash.
inline size_t CapacityToGrowth(size_t capacity) {
  assert(IsValidCapacity(capacity));
  // `capacity*7/8`
  if (Group::kWidth == 8 && capacity == 7) {
    // x-x/8 does not work when x==7.
    return 6;
  }
  return capacity - capacity / 8;
}

// Given `growth`, "unapplies" the load factor to find how large the capacity
// should be to stay within the load factor.
//
// This might not be a valid capacity and `NormalizeCapacity()` may be needed.
inline size_t GrowthToLowerboundCapacity(size_t growth) {
  // `growth*8/7`
  if (Group::kWidth == 8 && growth == 7) {
    // x+(x-1)/7 does not work when x==7.
    return 8;
  }
  return growth + static_cast<size_t>((static_cast<int64_t>(growth) - 1) / 7);
}
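
// Worked example (illustrative): at capacity 15 the table rehashes after
// CapacityToGrowth(15) == 15 - 15/8 == 14 insertions, i.e. a 14/15 ~ 7/8 load
// factor. Going the other way, holding 14 elements needs
// GrowthToLowerboundCapacity(14) == 14 + 13/7 == 15 slots, which is already
// the valid capacity 15.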

template <class InputIter>
size_t SelectBucketCountForIterRange(InputIter first, InputIter last,
                                     size_t bucket_count) {
  if (bucket_count != 0) {
    return bucket_count;
  }
  using InputIterCategory =
      typename std::iterator_traits<InputIter>::iterator_category;
  if (std::is_base_of<std::random_access_iterator_tag,
                      InputIterCategory>::value) {
    return GrowthToLowerboundCapacity(
        static_cast<size_t>(std::distance(first, last)));
  }
  return 0;
}

constexpr bool SwisstableDebugEnabled() {
#if defined(ABSL_SWISSTABLE_ENABLE_GENERATIONS) || \
    ABSL_OPTION_HARDENED == 1 || !defined(NDEBUG)
  return true;
#else
  return false;
#endif
}

inline void AssertIsFull(const ctrl_t* ctrl, GenerationType generation,
                         const GenerationType* generation_ptr,
                         const char* operation) {
  if (!SwisstableDebugEnabled()) return;
  // `SwisstableDebugEnabled()` is also true for release builds with hardening
  // enabled. To minimize their impact in those builds:
  // - use `ABSL_PREDICT_FALSE()` to provide a compiler hint for code layout
  // - use `ABSL_RAW_LOG()` with a format string to reduce code size and
  //   improve the chances that the hot paths will be inlined.
  if (ABSL_PREDICT_FALSE(ctrl == nullptr)) {
    ABSL_RAW_LOG(FATAL, "%s called on end() iterator.", operation);
  }
  if (ABSL_PREDICT_FALSE(ctrl == EmptyGroup())) {
    ABSL_RAW_LOG(FATAL, "%s called on default-constructed iterator.",
                 operation);
  }
  if (SwisstableGenerationsEnabled()) {
    if (ABSL_PREDICT_FALSE(generation != *generation_ptr)) {
      ABSL_RAW_LOG(FATAL,
                   "%s called on invalid iterator. The table could have "
                   "rehashed or moved since this iterator was initialized.",
                   operation);
    }
    if (ABSL_PREDICT_FALSE(!IsFull(*ctrl))) {
      ABSL_RAW_LOG(
          FATAL,
          "%s called on invalid iterator. The element was likely erased.",
          operation);
    }
  } else {
    if (ABSL_PREDICT_FALSE(!IsFull(*ctrl))) {
      ABSL_RAW_LOG(
          FATAL,
          "%s called on invalid iterator. The element might have been erased "
          "or the table might have rehashed. Consider running with "
          "--config=asan to diagnose rehashing issues.",
          operation);
    }
  }
}

// Note that for comparisons, null/end iterators are valid.
inline void AssertIsValidForComparison(const ctrl_t* ctrl,
                                       GenerationType generation,
                                       const GenerationType* generation_ptr) {
  if (!SwisstableDebugEnabled()) return;
  const bool ctrl_is_valid_for_comparison =
      ctrl == nullptr || ctrl == EmptyGroup() || IsFull(*ctrl);
  if (SwisstableGenerationsEnabled()) {
    if (ABSL_PREDICT_FALSE(generation != *generation_ptr)) {
      ABSL_RAW_LOG(FATAL,
                   "Invalid iterator comparison. The table could have rehashed "
                   "or moved since this iterator was initialized.");
    }
    if (ABSL_PREDICT_FALSE(!ctrl_is_valid_for_comparison)) {
      ABSL_RAW_LOG(
          FATAL, "Invalid iterator comparison. The element was likely erased.");
    }
  } else {
    ABSL_HARDENING_ASSERT(
        ctrl_is_valid_for_comparison &&
        "Invalid iterator comparison. The element might have been erased or "
        "the table might have rehashed. Consider running with --config=asan to "
        "diagnose rehashing issues.");
  }
}

// If the two iterators come from the same container, then their pointers will
// interleave such that ctrl_a <= ctrl_b < slot_a <= slot_b or vice versa.
// Note: we take slots by reference so that it's not UB if they're
// uninitialized as long as we don't read them (when ctrl is null).
inline bool AreItersFromSameContainer(const ctrl_t* ctrl_a,
                                      const ctrl_t* ctrl_b,
                                      const void* const& slot_a,
                                      const void* const& slot_b) {
  // If either control byte is null, then we can't tell.
  if (ctrl_a == nullptr || ctrl_b == nullptr) return true;
  const bool a_is_soo = IsSooControl(ctrl_a);
  if (a_is_soo != IsSooControl(ctrl_b)) return false;
  if (a_is_soo) return slot_a == slot_b;

  const void* low_slot = slot_a;
  const void* hi_slot = slot_b;
  if (ctrl_a > ctrl_b) {
    std::swap(ctrl_a, ctrl_b);
    std::swap(low_slot, hi_slot);
  }
  return ctrl_b < low_slot && low_slot <= hi_slot;
}

// Asserts that two iterators come from the same container.
// Note: we take slots by reference so that it's not UB if they're
// uninitialized as long as we don't read them (when ctrl is null).
inline void AssertSameContainer(const ctrl_t* ctrl_a, const ctrl_t* ctrl_b,
                                const void* const& slot_a,
                                const void* const& slot_b,
                                const GenerationType* generation_ptr_a,
                                const GenerationType* generation_ptr_b) {
  if (!SwisstableDebugEnabled()) return;
  // `SwisstableDebugEnabled()` is also true for release builds with hardening
  // enabled. To minimize their impact in those builds:
  // - use `ABSL_PREDICT_FALSE()` to provide a compiler hint for code layout
  // - use `ABSL_RAW_LOG()` with a format string to reduce code size and
  //   improve the chances that the hot paths will be inlined.

  // fail_if(is_invalid, message) crashes when is_invalid is true and provides
  // an error message based on `message`.
  const auto fail_if = [](bool is_invalid, const char* message) {
    if (ABSL_PREDICT_FALSE(is_invalid)) {
      ABSL_RAW_LOG(FATAL, "Invalid iterator comparison. %s", message);
    }
  };

  const bool a_is_default = ctrl_a == EmptyGroup();
  const bool b_is_default = ctrl_b == EmptyGroup();
  if (a_is_default && b_is_default) return;
  fail_if(a_is_default != b_is_default,
          "Comparing default-constructed hashtable iterator with a "
          "non-default-constructed hashtable iterator.");

  if (SwisstableGenerationsEnabled()) {
    if (ABSL_PREDICT_TRUE(generation_ptr_a == generation_ptr_b)) return;
    // Users don't need to know whether the tables are SOO so don't mention SOO
    // in the debug message.
    const bool a_is_soo = IsSooControl(ctrl_a);
    const bool b_is_soo = IsSooControl(ctrl_b);
    fail_if(a_is_soo != b_is_soo || (a_is_soo && b_is_soo),
            "Comparing iterators from different hashtables.");

    const bool a_is_empty = IsEmptyGeneration(generation_ptr_a);
    const bool b_is_empty = IsEmptyGeneration(generation_ptr_b);
    fail_if(a_is_empty != b_is_empty,
            "Comparing an iterator from an empty hashtable with an iterator "
            "from a non-empty hashtable.");
    fail_if(a_is_empty && b_is_empty,
            "Comparing iterators from different empty hashtables.");

    const bool a_is_end = ctrl_a == nullptr;
    const bool b_is_end = ctrl_b == nullptr;
    fail_if(a_is_end || b_is_end,
            "Comparing iterator with an end() iterator from a different "
            "hashtable.");
    fail_if(true, "Comparing non-end() iterators from different hashtables.");
  } else {
    ABSL_HARDENING_ASSERT(
        AreItersFromSameContainer(ctrl_a, ctrl_b, slot_a, slot_b) &&
        "Invalid iterator comparison. The iterators may be from different "
        "containers or the container might have rehashed or moved. Consider "
        "running with --config=asan to diagnose issues.");
  }
}

struct FindInfo {
  size_t offset;
  size_t probe_length;
};

// Whether a table is "small". A small table fits entirely into a probing
// group, i.e., has a capacity < `Group::kWidth`.
//
// In small mode we are able to use the whole capacity. The extra control
// bytes give us at least one "empty" control byte to stop the probing.
// This is important to make 1 a valid capacity.
//
// In small mode only the first `capacity` control bytes after the sentinel
// are valid. The rest contain dummy ctrl_t::kEmpty values that do not
// represent a real slot.
inline bool is_small(size_t capacity) { return capacity < Group::kWidth - 1; }
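
// For example (illustrative): with 16-wide groups, capacities 1, 3, and 7 are
// "small" (the whole table fits in one probe window and every slot is
// usable), while capacity 15 is not: is_small(7) is true, is_small(15) is
// false.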

// Whether a table fits entirely into a probing group.
// An arbitrary order of elements in such tables is correct.
inline bool is_single_group(size_t capacity) {
  return capacity <= Group::kWidth;
}

// Begins a probing operation on `ctrl`, using `hash`.
inline probe_seq<Group::kWidth> probe(const ctrl_t* ctrl, const size_t capacity,
                                      size_t hash) {
  return probe_seq<Group::kWidth>(H1(hash, ctrl), capacity);
}
inline probe_seq<Group::kWidth> probe(const CommonFields& common, size_t hash) {
  return probe(common.control(), common.capacity(), hash);
}

// Probes an array of control bits using a probe sequence derived from `hash`,
// and returns the offset corresponding to the first deleted or empty slot.
//
// Behavior when the entire table is full is undefined.
//
// NOTE: this function must work with tables having both empty and deleted
// slots in the same group. Such tables appear during `erase()`.
template <typename = void>
inline FindInfo find_first_non_full(const CommonFields& common, size_t hash) {
  auto seq = probe(common, hash);
  const ctrl_t* ctrl = common.control();
  if (IsEmptyOrDeleted(ctrl[seq.offset()]) &&
      !ShouldInsertBackwards(common.capacity(), hash, ctrl)) {
    return {seq.offset(), 0};
  }
  while (true) {
    GroupFullEmptyOrDeleted g{ctrl + seq.offset()};
    auto mask = g.MaskEmptyOrDeleted();
    if (mask) {
      return {
          seq.offset(GetInsertionOffset(mask, common.capacity(), hash, ctrl)),
          seq.index()};
    }
    seq.next();
    assert(seq.index() <= common.capacity() && "full table!");
  }
}

// Extern template for the inline function above keeps the possibility of
// inlining. When the compiler decides not to inline, no symbols will be added
// to the corresponding translation unit.
extern template FindInfo find_first_non_full(const CommonFields&, size_t);

// Non-inlined version of find_first_non_full for use in less performance
// critical routines.
FindInfo find_first_non_full_outofline(const CommonFields&, size_t);

inline void ResetGrowthLeft(CommonFields& common) {
  common.growth_info().InitGrowthLeftNoDeleted(
      CapacityToGrowth(common.capacity()) - common.size());
}

// Sets all control bytes (including the cloned tail) to kEmpty and the byte
// at index `capacity` to kSentinel, marking the whole table as empty.
inline void ResetCtrl(CommonFields& common, size_t slot_size) {
  const size_t capacity = common.capacity();
  ctrl_t* ctrl = common.control();
  std::memset(ctrl, static_cast<int8_t>(ctrl_t::kEmpty),
              capacity + 1 + NumClonedBytes());
  ctrl[capacity] = ctrl_t::kSentinel;
  SanitizerPoisonMemoryRegion(common.slot_array(), slot_size * capacity);
}

// Sets sanitizer poisoning for the slot corresponding to the control byte
// being set.
inline void DoSanitizeOnSetCtrl(const CommonFields& c, size_t i, ctrl_t h,
                                size_t slot_size) {
  assert(i < c.capacity());
  auto* slot_i = static_cast<const char*>(c.slot_array()) + i * slot_size;
  if (IsFull(h)) {
    SanitizerUnpoisonMemoryRegion(slot_i, slot_size);
  } else {
    SanitizerPoisonMemoryRegion(slot_i, slot_size);
  }
}

// Sets `ctrl[i]` to `h`.
//
// Unlike setting it directly, this function will perform the sanitizer
// bookkeeping and mirror the value to the cloned tail if necessary.
inline void SetCtrl(const CommonFields& c, size_t i, ctrl_t h,
                    size_t slot_size) {
  DoSanitizeOnSetCtrl(c, i, h, slot_size);
  ctrl_t* ctrl = c.control();
  ctrl[i] = h;
  ctrl[((i - NumClonedBytes()) & c.capacity()) +
       (NumClonedBytes() & c.capacity())] = h;
}

// Overload for setting to an occupied `h2_t` rather than a special `ctrl_t`.
inline void SetCtrl(const CommonFields& c, size_t i, h2_t h, size_t slot_size) {
  SetCtrl(c, i, static_cast<ctrl_t>(h), slot_size);
}
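
// A worked example of the mirroring arithmetic above (illustrative): with
// capacity 15 and Group::kWidth == 16, NumClonedBytes() == 15, so
// SetCtrl(c, i, h) also writes ctrl[((i - 15) & 15) + 15]: i == 0 mirrors to
// ctrl[16] (just past the sentinel at ctrl[15]), i == 1 to ctrl[17], and so
// on. This is what lets a Group load a full 16-byte window starting at any
// index without wrapping.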

// Like SetCtrl, but in a single-group table we can save some operations when
// setting the cloned control byte.
inline void SetCtrlInSingleGroupTable(const CommonFields& c, size_t i, ctrl_t h,
                                      size_t slot_size) {
  assert(is_single_group(c.capacity()));
  DoSanitizeOnSetCtrl(c, i, h, slot_size);
  ctrl_t* ctrl = c.control();
  ctrl[i] = h;
  ctrl[i + c.capacity() + 1] = h;
}

// Overload for setting to an occupied `h2_t` rather than a special `ctrl_t`.
inline void SetCtrlInSingleGroupTable(const CommonFields& c, size_t i, h2_t h,
                                      size_t slot_size) {
  SetCtrlInSingleGroupTable(c, i, static_cast<ctrl_t>(h), slot_size);
}

// growth_info (which is a size_t) is stored with the backing array.
constexpr size_t BackingArrayAlignment(size_t align_of_slot) {
  return (std::max)(align_of_slot, alignof(GrowthInfo));
}

// Returns the address of the ith slot in slots where each slot occupies
// slot_size.
inline void* SlotAddress(void* slot_array, size_t slot, size_t slot_size) {
  return static_cast<void*>(static_cast<char*>(slot_array) +
                            (slot * slot_size));
}

// Iterates over all full slots and calls `cb(const ctrl_t*, SlotType*)`.
// No insertion into the table is allowed during the `cb` call.
// Erasure is allowed only for the element passed to the callback.
template <class SlotType, class Callback>
ABSL_ATTRIBUTE_ALWAYS_INLINE inline void IterateOverFullSlots(
    const CommonFields& c, SlotType* slot, Callback cb) {
  const size_t cap = c.capacity();
  const ctrl_t* ctrl = c.control();
  if (is_small(cap)) {
    // Mirrored/cloned control bytes in a small table are also located in the
    // first group (starting from position 0). We take the group from position
    // `capacity` in order to avoid duplicates.
    //
    // A small table's capacity fits into a portable group, where
    // GroupPortableImpl::MaskFull is more efficient for
    // capacity <= GroupPortableImpl::kWidth.
    assert(cap <= GroupPortableImpl::kWidth &&
           "unexpectedly large small capacity");
    static_assert(Group::kWidth >= GroupPortableImpl::kWidth,
                  "unexpected group width");
    // The group starts from the kSentinel slot, so indices in the mask are
    // increased by 1.
    const auto mask = GroupPortableImpl(ctrl + cap).MaskFull();
    --ctrl;
    --slot;
    for (uint32_t i : mask) {
      cb(ctrl + i, slot + i);
    }
    return;
  }
  size_t remaining = c.size();
  ABSL_ATTRIBUTE_UNUSED const size_t original_size_for_assert = remaining;
  while (remaining != 0) {
    for (uint32_t i : GroupFullEmptyOrDeleted(ctrl).MaskFull()) {
      assert(IsFull(ctrl[i]) && "hash table was modified unexpectedly");
      cb(ctrl + i, slot + i);
      --remaining;
    }
    ctrl += Group::kWidth;
    slot += Group::kWidth;
    assert((remaining == 0 || *(ctrl - 1) != ctrl_t::kSentinel) &&
           "hash table was modified unexpectedly");
  }
  // NOTE: erasure of the current element is allowed in the callback for the
  // absl::erase_if specialization, so we use `>=`.
  assert(original_size_for_assert >= c.size() &&
         "hash table was modified unexpectedly");
}

template <typename CharAlloc>
constexpr bool ShouldSampleHashtablezInfo() {
  // Folks with custom allocators often make unwarranted assumptions about the
  // behavior of their classes vis-a-vis trivial destructability and what
  // calls they will or won't make. Avoid sampling for people with custom
  // allocators to get us out of this mess. This is not a hard guarantee but a
  // workaround while we plan the exact guarantee we want to provide.
  return std::is_same<CharAlloc, std::allocator<char>>::value;
}

template <bool kSooEnabled>
HashtablezInfoHandle SampleHashtablezInfo(size_t sizeof_slot, size_t sizeof_key,
                                          size_t sizeof_value,
                                          size_t old_capacity, bool was_soo,
                                          HashtablezInfoHandle forced_infoz,
                                          CommonFields& c) {
  if (forced_infoz.IsSampled()) return forced_infoz;
  // In SOO, we sample on the first insertion, so if this is an empty SOO case
  // (e.g. when reserve is called), then we still need to sample.
  if (kSooEnabled && was_soo && c.size() == 0) {
    return Sample(sizeof_slot, sizeof_key, sizeof_value, SooCapacity());
  }
  // For non-SOO cases, we sample whenever the capacity is increasing from
  // zero to non-zero.
  if (!kSooEnabled && old_capacity == 0) {
    return Sample(sizeof_slot, sizeof_key, sizeof_value, 0);
  }
  return c.infoz();
}

// Helper class to perform a resize of the hash set.
//
// It contains special optimizations for small-group resizes.
// See GrowIntoSingleGroupShuffleControlBytes for details.
class HashSetResizeHelper {
 public:
  explicit HashSetResizeHelper(CommonFields& c, bool was_soo, bool had_soo_slot,
                               HashtablezInfoHandle forced_infoz)
      : old_capacity_(c.capacity()),
        had_infoz_(c.has_infoz()),
        was_soo_(was_soo),
        had_soo_slot_(had_soo_slot),
        forced_infoz_(forced_infoz) {}

  // Optimized-for-small-groups version of `find_first_non_full`.
  // Beneficial only right after calling `raw_hash_set::resize`.
  // It is safe to call if the capacity is big or was not changed, but there
  // will be no performance benefit.
  // It has the implicit assumption that `resize` will call
  // `GrowSizeIntoSingleGroup*` when `IsGrowingIntoSingleGroupApplicable`.
  // Falls back to `find_first_non_full` for big groups.
  static FindInfo FindFirstNonFullAfterResize(const CommonFields& c,
                                              size_t old_capacity,
                                              size_t hash) {
    if (!IsGrowingIntoSingleGroupApplicable(old_capacity, c.capacity())) {
      return find_first_non_full(c, hash);
    }
    // Probe as usual; since the table is a single group, any offset that does
    // not land in the freshly grown second half is redirected to a fixed
    // empty position in the first half.
    size_t offset = probe(c, hash).offset();
    // The new capacity is 2 * old_capacity + 1, so indices
    // [old_capacity + 1, 2 * old_capacity] form the second half.
    if (offset - (old_capacity + 1) >= old_capacity) {
      // The offset fell outside the second half; use a position in the first
      // half that the control-byte shuffle left empty.
      offset = old_capacity / 2;
    }
    assert(IsEmpty(c.control()[offset]));
    return FindInfo{offset, 0};
  }

  HeapOrSoo& old_heap_or_soo() { return old_heap_or_soo_; }
  void* old_soo_data() { return old_heap_or_soo_.get_soo_data(); }
  ctrl_t* old_ctrl() const {
    assert(!was_soo_);
    return old_heap_or_soo_.control();
  }
  void* old_slots() const {
    assert(!was_soo_);
    return old_heap_or_soo_.slot_array().get();
  }
  size_t old_capacity() const { return old_capacity_; }

  // Returns the index of the SOO slot when growing from SOO to non-SOO in a
  // single group. See also InitControlBytesAfterSoo(). It's important to use
  // index 1 so that when resizing from capacity 1 to 3, we can still have
  // random iteration order between the first two inserted elements, i.e. it
  // allows inserting the second element at either index 0 or 2.
  static size_t SooSlotIndex() { return 1; }

  // Allocates a backing array for the hashtable.
  // Reads `capacity` from CommonFields and updates all other fields based on
  // the result of the allocation.
  //
  // It also may do the following:
  //   - initialize control bytes,
  //   - initialize slots,
  //   - deallocate old slots.
  //
  // We bundle a lot of functionality into one ABSL_ATTRIBUTE_NOINLINE
  // function in order to minimize binary code duplication in
  // raw_hash_set<>::resize.
  //
  // `c.capacity()` must be nonzero. Returns whether this was a grow into a
  // single group; in that case, when transfer does not use memcpy,
  // GrowSizeIntoSingleGroup must still be called to finish initializing the
  // slots.
  template <typename Alloc, size_t SizeOfSlot, bool TransferUsesMemcpy,
            bool SooEnabled, size_t AlignOfSlot>
  ABSL_ATTRIBUTE_NOINLINE bool InitializeSlots(CommonFields& c, Alloc alloc,
                                               ctrl_t soo_slot_h2,
                                               size_t key_size,
                                               size_t value_size) {
    assert(c.capacity());
    HashtablezInfoHandle infoz =
        ShouldSampleHashtablezInfo<Alloc>()
            ? SampleHashtablezInfo<SooEnabled>(SizeOfSlot, key_size, value_size,
                                               old_capacity_, was_soo_,
                                               forced_infoz_, c)
            : HashtablezInfoHandle{};

    const bool has_infoz = infoz.IsSampled();
    RawHashSetLayout layout(c.capacity(), AlignOfSlot, has_infoz);
    char* mem = static_cast<char*>(Allocate<BackingArrayAlignment(AlignOfSlot)>(
        &alloc, layout.alloc_size(SizeOfSlot)));
    const GenerationType old_generation = c.generation();
    c.set_generation_ptr(
        reinterpret_cast<GenerationType*>(mem + layout.generation_offset()));
    c.set_generation(NextGeneration(old_generation));
    c.set_control(reinterpret_cast<ctrl_t*>(mem + layout.control_offset()));
    c.set_slots(mem + layout.slot_offset());
    ResetGrowthLeft(c);

    const bool grow_single_group =
        IsGrowingIntoSingleGroupApplicable(old_capacity_, layout.capacity());
    if (SooEnabled && was_soo_ && grow_single_group) {
      InitControlBytesAfterSoo(c.control(), soo_slot_h2, layout.capacity());
      if (TransferUsesMemcpy && had_soo_slot_) {
        TransferSlotAfterSoo(c, SizeOfSlot);
      }
      // SooEnabled implies that old_capacity_ != 0.
    } else if ((SooEnabled || old_capacity_ != 0) && grow_single_group) {
      if (TransferUsesMemcpy) {
        GrowSizeIntoSingleGroupTransferable(c, SizeOfSlot);
        DeallocateOld<AlignOfSlot>(alloc, SizeOfSlot);
      } else {
        GrowIntoSingleGroupShuffleControlBytes(c.control(), layout.capacity());
      }
    } else {
      ResetCtrl(c, SizeOfSlot);
    }

    c.set_has_infoz(has_infoz);
    if (has_infoz) {
      infoz.RecordStorageChanged(c.size(), layout.capacity());
      if ((SooEnabled && was_soo_) || grow_single_group || old_capacity_ == 0) {
        infoz.RecordRehash(0);
      }
      c.set_infoz(infoz);
    }
    return grow_single_group;
  }

  // Relocates slots into the new single group consistent with
  // GrowIntoSingleGroupShuffleControlBytes.
  //
  // PRECONDITIONS:
  // 1. GrowIntoSingleGroupShuffleControlBytes was already called.
  template <class PolicyTraits, class Alloc>
  void GrowSizeIntoSingleGroup(CommonFields& c, Alloc& alloc_ref) {
    assert(old_capacity_ < Group::kWidth / 2);
    assert(IsGrowingIntoSingleGroupApplicable(old_capacity_, c.capacity()));
    using slot_type = typename PolicyTraits::slot_type;
    assert(is_single_group(c.capacity()));

    auto* new_slots = static_cast<slot_type*>(c.slot_array());
    auto* old_slots_ptr = static_cast<slot_type*>(old_slots());

    // Each full old slot at index i is relocated to i ^ shuffle_bit, matching
    // the control byte shuffle.
    size_t shuffle_bit = old_capacity_ / 2 + 1;
    for (size_t i = 0; i < old_capacity_; ++i) {
      if (IsFull(old_ctrl()[i])) {
        size_t new_i = i ^ shuffle_bit;
        SanitizerUnpoisonMemoryRegion(new_slots + new_i, sizeof(slot_type));
        PolicyTraits::transfer(&alloc_ref, new_slots + new_i,
                               old_slots_ptr + i);
      }
    }
    PoisonSingleGroupEmptySlots(c, sizeof(slot_type));
  }

  // Deallocates the old backing array.
  template <size_t AlignOfSlot, class CharAlloc>
  void DeallocateOld(CharAlloc alloc_ref, size_t slot_size) {
    SanitizerUnpoisonMemoryRegion(old_slots(), slot_size * old_capacity_);
    auto layout = RawHashSetLayout(old_capacity_, AlignOfSlot, had_infoz_);
    Deallocate<BackingArrayAlignment(AlignOfSlot)>(
        &alloc_ref, old_ctrl() - layout.control_offset(),
        layout.alloc_size(slot_size));
  }

 private:
  // Returns true if `GrowSizeIntoSingleGroup` can be used for resizing.
  static bool IsGrowingIntoSingleGroupApplicable(size_t old_capacity,
                                                 size_t new_capacity) {
    // NOTE that `old_capacity < new_capacity` in order to have
    // `old_capacity < Group::kWidth`.
    return is_single_group(new_capacity) && old_capacity < new_capacity;
  }

  // Relocates control bytes and slots into the new single group for
  // transferable objects.
  // Must be called only if IsGrowingIntoSingleGroupApplicable returned true.
  void GrowSizeIntoSingleGroupTransferable(CommonFields& c, size_t slot_size);

  // If there was an SOO slot and slots are transferable, transfers the SOO
  // slot into the new heap allocation. Must be called only if
  // IsGrowingIntoSingleGroupApplicable returned true.
  void TransferSlotAfterSoo(CommonFields& c, size_t slot_size);

  // Shuffles the old control bytes into their positions in the grown
  // single-group table, consistent with the slot shuffle performed by
  // GrowIntoSingleGroupShuffleTransferableSlots(), and marks the remaining
  // positions empty (with the sentinel at `new_capacity`).
  //
  // PRECONDITIONS:
  // 1. new_ctrl is allocated for new_capacity, but not initialized.
  // 2. new_capacity is a single group.
  void GrowIntoSingleGroupShuffleControlBytes(ctrl_t* new_ctrl,
                                              size_t new_capacity) const;

  // If the table was SOO, initializes the new control bytes. `h2` is the
  // control byte corresponding to the full SOO slot, which is placed at
  // SooSlotIndex(). Must be called only if
  // IsGrowingIntoSingleGroupApplicable returned true.
  void InitControlBytesAfterSoo(ctrl_t* new_ctrl, ctrl_t h2,
                                size_t new_capacity);

  // Shuffles slots into the new single group consistent with
  // GrowIntoSingleGroupShuffleControlBytes.
  //
  // PRECONDITIONS:
  // 1. new_slots are allocated and *not* poisoned.
  void GrowIntoSingleGroupShuffleTransferableSlots(void* new_slots,
                                                   size_t slot_size) const;

  // Poisons the empty slots of the new single-group table so that sanitizers
  // flag reads of slots that do not hold a value.
  void PoisonSingleGroupEmptySlots(CommonFields& c, size_t slot_size) const {
    // Poison non-full items.
    for (size_t i = 0; i < c.capacity(); ++i) {
      if (!IsFull(c.control()[i])) {
        SanitizerPoisonMemoryRegion(SlotAddress(c.slot_array(), i, slot_size),
                                    slot_size);
      }
    }
  }

  HeapOrSoo old_heap_or_soo_;
  size_t old_capacity_;
  bool had_infoz_;
  bool was_soo_;
  bool had_soo_slot_;
  // Either null infoz or a pre-sampled forced infoz for SOO tables.
  HashtablezInfoHandle forced_infoz_;
};

inline void PrepareInsertCommon(CommonFields& common) {
  common.increment_size();
  common.maybe_increment_generation_on_insert();
}

// Like PrepareInsertNonSoo, but for the case of inserting into a full SOO
// table.
size_t PrepareInsertAfterSoo(size_t hash, size_t slot_size,
                             CommonFields& common);

// PolicyFunctions bundles together some information for a particular
// raw_hash_set<T, ...> instantiation. This information is passed to
// type-erased functions that want to do small amounts of type-specific work.
struct PolicyFunctions {
  size_t slot_size;

  // Returns the pointer to the hash function stored in the set.
  const void* (*hash_fn)(const CommonFields& common);

  // Returns the hash of the pointed-to slot.
  size_t (*hash_slot)(const void* hash_fn, void* slot);

  // Transfers the contents of src_slot to dst_slot.
  void (*transfer)(void* set, void* dst_slot, void* src_slot);

  // Deallocates the backing store from common.
  void (*dealloc)(CommonFields& common, const PolicyFunctions& policy);

  // Resizes set to the new capacity.
  // Arguments are used as in raw_hash_set::resize_impl.
  void (*resize)(CommonFields& common, size_t new_capacity,
                 HashtablezInfoHandle forced_infoz);
};

// ClearBackingArray clears the backing array, either modifying it in place,
// or creating a new one based on the value of "reuse".
// REQUIRES: c.capacity > 0
void ClearBackingArray(CommonFields& c, const PolicyFunctions& policy,
                       bool reuse, bool soo_enabled);

// Type-erased version of raw_hash_set::erase_meta_only.
void EraseMetaOnly(CommonFields& c, size_t index, size_t slot_size);

// Function to place in PolicyFunctions::dealloc for raw_hash_sets that are
// using std::allocator. This allows us to share the same function body for
// raw_hash_set instantiations that have the same slot alignment.
template <size_t AlignOfSlot>
ABSL_ATTRIBUTE_NOINLINE void DeallocateStandard(CommonFields& common,
                                                const PolicyFunctions& policy) {
  // Unpoison before returning the memory to the allocator.
  SanitizerUnpoisonMemoryRegion(common.slot_array(),
                                policy.slot_size * common.capacity());

  std::allocator<char> alloc;
  common.infoz().Unregister();
  Deallocate<BackingArrayAlignment(AlignOfSlot)>(
      &alloc, common.backing_array_start(),
      common.alloc_size(policy.slot_size, AlignOfSlot));
}

// For trivially relocatable types we use memcpy directly. This allows us to
// share the same function body for raw_hash_set instantiations that have the
// same slot size.
template <size_t SizeOfSlot>
ABSL_ATTRIBUTE_NOINLINE void TransferRelocatable(void*, void* dst, void* src) {
  memcpy(dst, src, SizeOfSlot);
}

// Type-erased raw_hash_set::get_hash_ref_fn for the empty hash function case.
const void* GetHashRefForEmptyHasher(const CommonFields& common);

// Given the hash of a value not currently in the table and the first empty
// slot in the probe sequence, finds a viable slot index to insert it at.
//
// In the absence of deleted slots and with positive growth_left, the element
// can be inserted at the provided `target` position. When the table has
// deleted slots (according to GrowthInfo), the target position is searched
// one more time using `find_first_non_full`. If there is no space left, the
// table is resized or rehashed.
//
// REQUIRES: the table is not SOO.
// REQUIRES: at least one non-full slot is available.
// REQUIRES: `target` is a valid empty position to insert.
size_t PrepareInsertNonSoo(CommonFields& common, size_t hash, FindInfo target,
                           const PolicyFunctions& policy);

// A SwissTable.
//
// Policy: TODO(alkis): describe.
//
// Hash: a (possibly polymorphic) functor that hashes keys of the table. The
// functor should accept a key and return size_t as the hash. For best
// performance it is important that the hash function provides high entropy
// across all bits of the hash.
//
// Eq: a (possibly polymorphic) functor that compares two keys for equality.
// It should accept two (possibly different types of) keys and return a bool.
// If two keys compare equal, then their hash values as defined by Hash MUST
// be equal.
//
// Allocator: an Allocator
// [https://en.cppreference.com/w/cpp/named_req/Allocator] object. The table
// is allowed to use a different allocator for its internals.
2333 template <class Policy, class Hash, class Eq, class Alloc>
2334 class raw_hash_set {
2335 using PolicyTraits = hash_policy_traits<Policy>;
2336 using KeyArgImpl =
2337 KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;
2338
2339 public:
2340 using init_type = typename PolicyTraits::init_type;
2341 using key_type = typename PolicyTraits::key_type;
2342 // TODO(sbenza): Hide slot_type as it is an implementation detail. Needs
2343 // user-facing API first.
2344 using slot_type = typename PolicyTraits::slot_type;
2345 using allocator_type = Alloc;
2346 using size_type = size_t;
2347 using difference_type = ptrdiff_t;
2348 using hasher = Hash;
2349 using key_equal = Eq;
2350 using policy_type = Policy;
2351 using value_type = typename PolicyTraits::value_type;
2352 using reference = value_type&;
2353 using const_reference = const value_type&;
2354 using pointer = typename absl::allocator_traits<
2355 allocator_type>::template rebind_traits<value_type>::pointer;
2356 using const_pointer = typename absl::allocator_traits<
2357 allocator_type>::template rebind_traits<value_type>::const_pointer;
2358
2359 // Alias used for heterogeneous lookup functions.
2360 // `key_arg<K>` evaluates to `K` when the functors are transparent and
2361 // to `key_type` otherwise. It permits template argument deduction on
2362 // `K` for the transparent case.
2363 template <class K>
2364 using key_arg = typename KeyArgImpl::template type<K, key_type>;
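// Illustrative example (via the public wrappers built on this class) of what
// the alias above enables. With transparent Hash/Eq, `key_arg<K>` deduces K,
// so the lookup below hashes the string_view directly instead of
// materializing a temporary std::string:
//
//   #include "absl/container/flat_hash_set.h"
//   #include "absl/strings/string_view.h"
//
//   absl::flat_hash_set<std::string> s = {"abc"};
//   bool found = s.contains(absl::string_view("abc"));  // no std::string made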
2365
2366 private:
2367
2368 // Whether small object optimization (SOO) is enabled for this policy.
2369 // When enabled, the single element of a size-1 table is stored inline
2370 // in the table object itself (inside HeapOrSoo) instead of in a heap
2371 // allocation, so the slot must fit within HeapOrSoo and be no more
2372 // strictly aligned than it.
2373 constexpr static bool SooEnabled() {
2374 return PolicyTraits::soo_enabled() &&
2375 sizeof(slot_type) <= sizeof(HeapOrSoo) &&
2376 alignof(slot_type) <= alignof(HeapOrSoo);
2377 }
2378
2379 // Whether `size` elements fit in the SOO capacity.
2380 bool fits_in_soo(size_t size) const {
2381 return SooEnabled() && size <= SooCapacity();
2382 }
2383
2384 bool is_soo() const { return fits_in_soo(capacity()); }
2385 bool is_full_soo() const { return is_soo() && !empty(); }
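// Illustrative consequence of the SOO predicates above, assuming the element
// type satisfies SooEnabled(): a table of size <= SooCapacity() keeps its
// element inline in the table object, and the first growth moves it onto the
// heap:
//
//   absl::flat_hash_set<int> s;
//   s.insert(1);  // may be stored inline (SOO): no backing array allocated
//   s.insert(2);  // grows out of SOO; elements move to a heap array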
2386
2387 // Give an early error when key_type is not hashable/eq-able.
2388 auto KeyTypeCanBeHashed(const Hash& h, const key_type& k) -> decltype(h(k));
2389 auto KeyTypeCanBeEq(const Eq& eq, const key_type& k) -> decltype(eq(k, k));
2390
2391 using AllocTraits = absl::allocator_traits<allocator_type>;
2392 using SlotAlloc = typename absl::allocator_traits<
2393 allocator_type>::template rebind_alloc<slot_type>;
2394
2395 // People are often sloppy with the exact type of their allocator
2396 // (e.g. an extra const or a missing pair); rebinds make it work anyway.
2397 using CharAlloc =
2398 typename absl::allocator_traits<Alloc>::template rebind_alloc<char>;
2399 using SlotAllocTraits = typename absl::allocator_traits<
2400 allocator_type>::template rebind_traits<slot_type>;
2401
2402 static_assert(std::is_lvalue_reference<reference>::value,
2403 "Policy::element() must return a reference");
2404
2405 template <typename T>
2406 struct SameAsElementReference
2407 : std::is_same<typename std::remove_cv<
2408 typename std::remove_reference<reference>::type>::type,
2409 typename std::remove_cv<
2410 typename std::remove_reference<T>::type>::type> {};
2411
2412 // An enabler for insert(T&&): T must be convertible to init_type or be
2413 // the same as [cv] value_type [ref].
2414 // Note: we separate SameAsElementReference into its own type to avoid
2415 // using reference unless we need to. MSVC doesn't seem to like it in
2416 // some cases.
2417 template <class T>
2418 using RequiresInsertable = typename std::enable_if<
2419 absl::disjunction<std::is_convertible<T, init_type>,
2420 SameAsElementReference<T>>::value,
2421 int>::type;
2422
2423 // RequiresNotInit is a workaround for gcc prior to 7.1.
2424 // See https://godbolt.org/g/Y4xsUh.
2425 template <class T>
2426 using RequiresNotInit =
2427 typename std::enable_if<!std::is_same<T, init_type>::value, int>::type;
2428
2429 template <class... Ts>
2430 using IsDecomposable = IsDecomposable<void, PolicyTraits, Hash, Eq, Ts...>;
2431
2432 public:
2433 static_assert(std::is_same<pointer, value_type*>::value,
2434 "Allocators with custom pointer types are not supported");
2435 static_assert(std::is_same<const_pointer, const value_type*>::value,
2436 "Allocators with custom pointer types are not supported");
2437
2438 class iterator : private HashSetIteratorGenerationInfo {
2439 friend class raw_hash_set;
2440 friend struct HashtableFreeFunctionsAccess;
2441
2442 public:
2443 using iterator_category = std::forward_iterator_tag;
2444 using value_type = typename raw_hash_set::value_type;
2445 using reference =
2446 absl::conditional_t<PolicyTraits::constant_iterators::value,
2447 const value_type&, value_type&>;
2448 using pointer = absl::remove_reference_t<reference>*;
2449 using difference_type = typename raw_hash_set::difference_type;
2450
2451 iterator() {}
2452
2453 // PRECONDITION: not an end() iterator.
2454 reference operator*() const {
2455 AssertIsFull(ctrl_, generation(), generation_ptr(), "operator*()");
2456 return unchecked_deref();
2457 }
2458
2459 // PRECONDITION: not an end() iterator.
2460 pointer operator->() const {
2461 AssertIsFull(ctrl_, generation(), generation_ptr(), "operator->");
2462 return &operator*();
2463 }
2464
2465 // PRECONDITION: not an end() iterator.
2466 iterator& operator++() {
2467 AssertIsFull(ctrl_, generation(), generation_ptr(), "operator++");
2468 ++ctrl_;
2469 ++slot_;
2470 skip_empty_or_deleted();
2471 if (ABSL_PREDICT_FALSE(*ctrl_ == ctrl_t::kSentinel)) ctrl_ = nullptr;
2472 return *this;
2473 }
2474
2475 iterator operator++(int) {
2476 auto tmp = *this;
2477 ++*this;
2478 return tmp;
2479 }
2480
2481 friend bool operator==(const iterator& a, const iterator& b) {
2482 AssertIsValidForComparison(a.ctrl_, a.generation(), a.generation_ptr());
2483 AssertIsValidForComparison(b.ctrl_, b.generation(), b.generation_ptr());
2484 AssertSameContainer(a.ctrl_, b.ctrl_, a.slot_, b.slot_,
2485 a.generation_ptr(), b.generation_ptr());
2486 return a.ctrl_ == b.ctrl_;
2487 }
2488 friend bool operator!=(const iterator& a, const iterator& b) {
2489 return !(a == b);
2490 }
2491
2492 private:
2493 iterator(ctrl_t* ctrl, slot_type* slot,
2494 const GenerationType* generation_ptr)
2495 : HashSetIteratorGenerationInfo(generation_ptr),
2496 ctrl_(ctrl),
2497 slot_(slot) {
2498 // This assumption helps the compiler know that any non-end iterator is
2499 // not equal to any end iterator.
2500 ABSL_ASSUME(ctrl != nullptr);
2501 }
2502
2503 // Constructor used when the slot pointer may point to uninitialized
2504 // memory (wrapped in MaybeInitializedPtr).
2505 iterator(ctrl_t* ctrl, MaybeInitializedPtr slot,
2506 const GenerationType* generation_ptr)
2507 : HashSetIteratorGenerationInfo(generation_ptr),
2508 ctrl_(ctrl),
2509 slot_(to_slot(slot.get())) {
2510 // This assumption helps the compiler know that any non-end iterator is
2511 // not equal to any end iterator.
2512 ABSL_ASSUME(ctrl != nullptr);
2513 }
2514
2515 explicit iterator(const GenerationType* generation_ptr)
2516 : HashSetIteratorGenerationInfo(generation_ptr), ctrl_(nullptr) {}
2517
2518 // Skips empty and deleted slots, advancing `ctrl_` and `slot_` until
2519 // they land on a full slot or the sentinel.
2520 void skip_empty_or_deleted() {
2521 while (IsEmptyOrDeleted(*ctrl_)) {
2522 uint32_t shift =
2523 GroupFullEmptyOrDeleted{ctrl_}.CountLeadingEmptyOrDeleted();
2524 ctrl_ += shift;
2525 slot_ += shift;
2526 }
2527 }
2528
2529 ctrl_t* control() const { return ctrl_; }
2530 slot_type* slot() const { return slot_; }
2531
2532 // We use EmptyGroup() for default-constructed iterators so that they
2533 // can be distinguished from end iterators, which have ctrl_ == nullptr.
2534 ctrl_t* ctrl_ = EmptyGroup();
2535 // To avoid uninitialized member warnings, put slot_ in an anonymous
2536 // union. The member is not initialized on singleton and end iterators.
2537 union {
2538 slot_type* slot_;
2539 };
2540
2541
2542 // An equality check which skips ABSL Hardening iterator invalidation
2543 // checks. Should be used when the lifetimes of the iterators are well
2544 // enough understood to prove that they cannot be invalid.
2545 bool unchecked_equals(const iterator& b) { return ctrl_ == b.control(); }
2546
2547 // Dereferences the iterator without ABSL Hardening iterator
2548 // invalidation checks.
2549 reference unchecked_deref() const { return PolicyTraits::element(slot_); }
2550 };
2551
2552 class const_iterator {
2553 friend class raw_hash_set;
2554 template <class Container, typename Enabler>
2555 friend struct absl::container_internal::hashtable_debug_internal::
2556 HashtableDebugAccess;
2557
2558 public:
2559 using iterator_category = typename iterator::iterator_category;
2560 using value_type = typename raw_hash_set::value_type;
2561 using reference = typename raw_hash_set::const_reference;
2562 using pointer = typename raw_hash_set::const_pointer;
2563 using difference_type = typename raw_hash_set::difference_type;
2564
2565 const_iterator() = default;
2566
2567 const_iterator(iterator i) : inner_(std::move(i)) {}
2568
2569 reference operator*() const { return *inner_; }
2570 pointer operator->() const { return inner_.operator->(); }
2571
2572 const_iterator& operator++() {
2573 ++inner_;
2574 return *this;
2575 }
2576 const_iterator operator++(int) { return inner_++; }
2577
2578 friend bool operator==(const const_iterator& a, const const_iterator& b) {
2579 return a.inner_ == b.inner_;
2580 }
2581 friend bool operator!=(const const_iterator& a, const const_iterator& b) {
2582 return !(a == b);
2583 }
2584
2585 private:
2586 const_iterator(const ctrl_t* ctrl, const slot_type* slot,
2587 const GenerationType* gen)
2588 : inner_(const_cast<ctrl_t*>(ctrl), const_cast<slot_type*>(slot), gen) {
2589 }
2590 ctrl_t* control() const { return inner_.control(); }
2591 slot_type* slot() const { return inner_.slot(); }
2592
2593 iterator inner_;
2594
2595 bool unchecked_equals(const const_iterator& b) {
2596 return inner_.unchecked_equals(b.inner_);
2597 }
2598 };
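// Illustrative iteration example. Iterators are forward-only and are
// invalidated by any operation that may rehash, so a mutate-while-iterating
// loop must follow the erase() idiom documented further below:
//
//   absl::flat_hash_set<int> s = {1, 2, 3};
//   for (auto it = s.begin(); it != s.end(); ++it) {
//     // Elements of a set are const; *it cannot be modified in place.
//   }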
2599
2600 using node_type = node_handle<Policy, hash_policy_traits<Policy>, Alloc>;
2601 using insert_return_type = InsertReturnType<iterator, node_type>;
2602
2603 // Note: can't use `= default` due to non-default noexcept (causes
2604 // problems for some compilers). NOLINTNEXTLINE
2605 raw_hash_set() noexcept(
2606 std::is_nothrow_default_constructible<hasher>::value &&
2607 std::is_nothrow_default_constructible<key_equal>::value &&
2608 std::is_nothrow_default_constructible<allocator_type>::value) {}
2609
2610 ABSL_ATTRIBUTE_NOINLINE explicit raw_hash_set(
2611 size_t bucket_count, const hasher& hash = hasher(),
2612 const key_equal& eq = key_equal(),
2613 const allocator_type& alloc = allocator_type())
2614 : settings_(CommonFields::CreateDefault<SooEnabled()>(), hash, eq,
2615 alloc) {
2616 if (bucket_count > (SooEnabled() ? SooCapacity() : 0)) {
2617 resize(NormalizeCapacity(bucket_count));
2618 }
2619 }
2620
2621 raw_hash_set(size_t bucket_count, const hasher& hash,
2622 const allocator_type& alloc)
2623 : raw_hash_set(bucket_count, hash, key_equal(), alloc) {}
2624
2625 raw_hash_set(size_t bucket_count, const allocator_type& alloc)
2626 : raw_hash_set(bucket_count, hasher(), key_equal(), alloc) {}
2627
2628 explicit raw_hash_set(const allocator_type& alloc)
2629 : raw_hash_set(0, hasher(), key_equal(), alloc) {}
2630
2631 template <class InputIter>
2632 raw_hash_set(InputIter first, InputIter last, size_t bucket_count = 0,
2633 const hasher& hash = hasher(), const key_equal& eq = key_equal(),
2634 const allocator_type& alloc = allocator_type())
2635 : raw_hash_set(SelectBucketCountForIterRange(first, last, bucket_count),
2636 hash, eq, alloc) {
2637 insert(first, last);
2638 }
2639
2640 template <class InputIter>
2641 raw_hash_set(InputIter first, InputIter last, size_t bucket_count,
2642 const hasher& hash, const allocator_type& alloc)
2643 : raw_hash_set(first, last, bucket_count, hash, key_equal(), alloc) {}
2644
2645 template <class InputIter>
2646 raw_hash_set(InputIter first, InputIter last, size_t bucket_count,
2647 const allocator_type& alloc)
2648 : raw_hash_set(first, last, bucket_count, hasher(), key_equal(), alloc) {}
2649
2650 template <class InputIter>
2651 raw_hash_set(InputIter first, InputIter last, const allocator_type& alloc)
2652 : raw_hash_set(first, last, 0, hasher(), key_equal(), alloc) {}
2653 // Instead of accepting std::initializer_list<value_type> as the first
2654 // argument like std::unordered_set<value_type> does, we have two
2655 // overloads that accept std::initializer_list<T> and
2656 // std::initializer_list<init_type>. This is advantageous for
2657 // performance.
2658 //
2659 //   // Turns {"abc", "def"} into std::initializer_list<std::string>,
2660 //   // then copies the strings into the set.
2661 //   std::unordered_set<std::string> s = {"abc", "def"};
2662 //
2663 //   // Turns {"abc", "def"} into std::initializer_list<const char*>,
2664 //   // then copies the strings into the set.
2665 //   absl::flat_hash_set<std::string> s = {"abc", "def"};
2666 //
2667 // The same trick is used in insert().
2668 //
2669 // The enabler is necessary to prevent this constructor from triggering
2670 // where the copy constructor is meant to be called.
2671 //
2672 //   absl::flat_hash_set<int> a, b{a};
2673 //
2674
2675 template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
2676 raw_hash_set(std::initializer_list<T> init, size_t bucket_count = 0,
2677 const hasher& hash = hasher(), const key_equal& eq = key_equal(),
2678 const allocator_type& alloc = allocator_type())
2679 : raw_hash_set(init.begin(), init.end(), bucket_count, hash, eq, alloc) {}
2680
2681 raw_hash_set(std::initializer_list<init_type> init, size_t bucket_count = 0,
2682 const hasher& hash = hasher(), const key_equal& eq = key_equal(),
2683 const allocator_type& alloc = allocator_type())
2684 : raw_hash_set(init.begin(), init.end(), bucket_count, hash, eq, alloc) {}
2685
2686 template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
2687 raw_hash_set(std::initializer_list<T> init, size_t bucket_count,
2688 const hasher& hash, const allocator_type& alloc)
2689 : raw_hash_set(init, bucket_count, hash, key_equal(), alloc) {}
2690
2691 raw_hash_set(std::initializer_list<init_type> init, size_t bucket_count,
2692 const hasher& hash, const allocator_type& alloc)
2693 : raw_hash_set(init, bucket_count, hash, key_equal(), alloc) {}
2694
2695 template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
2696 raw_hash_set(std::initializer_list<T> init, size_t bucket_count,
2697 const allocator_type& alloc)
2698 : raw_hash_set(init, bucket_count, hasher(), key_equal(), alloc) {}
2699
2700 raw_hash_set(std::initializer_list<init_type> init, size_t bucket_count,
2701 const allocator_type& alloc)
2702 : raw_hash_set(init, bucket_count, hasher(), key_equal(), alloc) {}
2703
2704 template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
2705 raw_hash_set(std::initializer_list<T> init, const allocator_type& alloc)
2706 : raw_hash_set(init, 0, hasher(), key_equal(), alloc) {}
2707
2708 raw_hash_set(std::initializer_list<init_type> init,
2709 const allocator_type& alloc)
2710 : raw_hash_set(init, 0, hasher(), key_equal(), alloc) {}
2711
2712 raw_hash_set(const raw_hash_set& that)
2713 : raw_hash_set(that, AllocTraits::select_on_container_copy_construction(
2714 that.alloc_ref())) {}
2715
2716 raw_hash_set(const raw_hash_set& that, const allocator_type& a)
2717 : raw_hash_set(GrowthToLowerboundCapacity(that.size()), that.hash_ref(),
2718 that.eq_ref(), a) {
2719 const size_t size = that.size();
2720 if (size == 0) {
2721 return;
2722 }
2723 // We don't use `that.is_soo()` here because `that` can have non-SOO
2724 // capacity but have a size that fits into SOO capacity.
2725 if (fits_in_soo(size)) {
2726 assert(size == 1);
2727 common().set_full_soo();
2728 emplace_at(soo_iterator(), *that.begin());
2729 const HashtablezInfoHandle infoz = try_sample_soo();
2730 if (infoz.IsSampled()) resize_with_soo_infoz(infoz);
2731 return;
2732 }
2733 assert(!that.is_soo());
2734 const size_t cap = capacity();
2735 // Note about single-group tables:
2736 // 1. It is correct to have any order of elements.
2737 // 2. Order has to be non-deterministic.
2738 // 3. We are assigning elements with arbitrary `shift` starting from
2739 //    `capacity + shift` position.
2740 // 4. `shift` must be coprime with `capacity + 1` in order to be able to
2741 //    use modular arithmetic to traverse all positions, instead of
2742 //    cycling through a subset of positions. Odd numbers are coprime
2743 //    with any `capacity + 1` (2^N).
2744 size_t offset = cap;
2745 const size_t shift =
2746 is_single_group(cap) ? (PerTableSalt(control()) | 1) : 0;
2747 IterateOverFullSlots(
2748 that.common(), that.slot_array(),
2749 [&](const ctrl_t* that_ctrl,
2750 slot_type* that_slot) ABSL_ATTRIBUTE_ALWAYS_INLINE {
2751 if (shift == 0) {
2752 // Big-table case: the position must be searched via probing. The
2753 // table is guaranteed to be empty, so we can do this faster than a
2754 // full `insert`.
2755 const size_t hash = PolicyTraits::apply(
2756 HashElement{hash_ref()}, PolicyTraits::element(that_slot));
2757 FindInfo target = find_first_non_full_outofline(common(), hash);
2758 infoz().RecordInsert(hash, target.probe_length);
2759 offset = target.offset;
2760 } else {
2761 // Small-table case: the next position is computed via the shift.
2762 offset = (offset + shift) & cap;
2763 }
2764 const h2_t h2 = static_cast<h2_t>(*that_ctrl);
2765 assert(
2766 H2(PolicyTraits::apply(HashElement{hash_ref()},
2767 PolicyTraits::element(that_slot))) == h2 &&
2768 "hash function value changed unexpectedly during the copy");
2769 SetCtrl(common(), offset, h2, sizeof(slot_type));
2770 emplace_at(iterator_at(offset), PolicyTraits::element(that_slot));
2771 common().maybe_increment_generation_on_insert();
2772 });
2773 if (shift != 0) {
2774 // On small-table copy we do not record individual inserts:
2775 // RecordInsert requires the hash, which is unknown for small tables.
2776 infoz().RecordStorageChanged(size, cap);
2777 }
2778 common().set_size(size);
2779 growth_info().OverwriteManyEmptyAsFull(size);
2780 }
2781
2782 ABSL_ATTRIBUTE_NOINLINE raw_hash_set(raw_hash_set&& that) noexcept(
2783 std::is_nothrow_copy_constructible<hasher>::value &&
2784 std::is_nothrow_copy_constructible<key_equal>::value &&
2785 std::is_nothrow_copy_constructible<allocator_type>::value)
2786 :
2787 // Hash, equality and allocator are copied instead of moved because
2788 // `that` must be left valid. If Hash is std::function<Key>, moving it
2789 // would create a nullptr functor that cannot be called.
2790 // Note: we avoid using exchange for better generated code.
2791 settings_(PolicyTraits::transfer_uses_memcpy() || !that.is_full_soo()
2792 ? std::move(that.common())
2793 : CommonFields{full_soo_tag_t{}},
2794 that.hash_ref(), that.eq_ref(), that.alloc_ref()) {
2795 if (!PolicyTraits::transfer_uses_memcpy() && that.is_full_soo()) {
2796 transfer(soo_slot(), that.soo_slot());
2797 }
2798 that.common() = CommonFields::CreateDefault<SooEnabled()>();
2799 maybe_increment_generation_or_rehash_on_move();
2800 }
2801
2802 raw_hash_set(raw_hash_set&& that, const allocator_type& a)
2803 : settings_(CommonFields::CreateDefault<SooEnabled()>(), that.hash_ref(),
2804 that.eq_ref(), a) {
2805 if (a == that.alloc_ref()) {
2806 swap_common(that);
2807 maybe_increment_generation_or_rehash_on_move();
2808 } else {
2809 move_elements_allocs_unequal(std::move(that));
2810 }
2811 }
2812
2813 raw_hash_set& operator=(const raw_hash_set& that) {
2814 if (ABSL_PREDICT_FALSE(this == &that)) return *this;
2815 constexpr bool propagate_alloc =
2816 AllocTraits::propagate_on_container_copy_assignment::value;
2817 // Copy into a temporary first (copy-and-move) so that *this is left
2818 // untouched if copying `that` throws.
2819 // TODO: avoid allocating a new backing array when this->capacity()
2820 // already suffices.
2821 raw_hash_set tmp(that, propagate_alloc ? that.alloc_ref() : alloc_ref());
2822
2823 return assign_impl<propagate_alloc>(std::move(tmp));
2824 }
2825
2826 raw_hash_set& operator=(raw_hash_set&& that) noexcept(
2827 absl::allocator_traits<allocator_type>::is_always_equal::value &&
2828 std::is_nothrow_move_assignable<hasher>::value &&
2829 std::is_nothrow_move_assignable<key_equal>::value) {
2830 // TODO(sbenza): We should only use the operations from the noexcept
2831 // clause to make sure we actually adhere to that contract.
2832 // NOLINTNEXTLINE: not returning *this for performance.
2833 return move_assign(
2834 std::move(that),
2835 typename AllocTraits::propagate_on_container_move_assignment());
2836 }
2837
2838 ~raw_hash_set() { destructor_impl(); }
2839
2840 iterator begin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
2841 if (ABSL_PREDICT_FALSE(empty())) return end();
2842 if (is_soo()) return soo_iterator();
2843 iterator it = {control(), common().slots_union(),
2844 common().generation_ptr()};
2845 it.skip_empty_or_deleted();
2846 assert(IsFull(*it.control()));
2847 return it;
2848 }
2849 iterator end() ABSL_ATTRIBUTE_LIFETIME_BOUND {
2850 return iterator(common().generation_ptr());
2851 }
2852
2853 const_iterator begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
2854 return const_cast<raw_hash_set*>(this)->begin();
2855 }
2856 const_iterator end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
2857 return iterator(common().generation_ptr());
2858 }
2859 const_iterator cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
2860 return begin();
2861 }
2862 const_iterator cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return end(); }
2863
2864 bool empty() const { return !size(); }
2865 size_t size() const { return common().size(); }
2866 size_t capacity() const {
2867 const size_t cap = common().capacity();
2868
2869 ABSL_ATTRIBUTE_UNUSED static constexpr bool kEnabled = SooEnabled();
2870 ABSL_ATTRIBUTE_UNUSED static constexpr size_t kCapacity = SooCapacity();
2871 ABSL_ASSUME(!kEnabled || cap >= kCapacity);
2872 return cap;
2873 }
2874 size_t max_size() const { return (std::numeric_limits<size_t>::max)(); }
2875
2876 ABSL_ATTRIBUTE_REINITIALIZES void clear() {
2877 // Iterating over this container is O(bucket_count()). When
2878 // bucket_count() is much greater than size(), iteration becomes
2879 // prohibitively expensive. For clear() it is more important to reuse
2880 // the allocated array when the container is small, because allocation
2881 // takes comparatively long compared to destruction of the elements. So
2882 // we pick the largest bucket_count() for which iteration is still fast
2883 // and past that we simply deallocate the array.
2884 const size_t cap = capacity();
2885 if (cap == 0) {
2886 // Already guaranteed to be empty; nothing to do.
2887 } else if (is_soo()) {
2888 if (!empty()) destroy(soo_slot());
2889 common().set_empty_soo();
2890 } else {
2891 destroy_slots();
2892 ClearBackingArray(common(), GetPolicyFunctions(), /*reuse=*/cap < 128,
2893 SooEnabled());
2894 }
2895 common().set_reserved_growth(0);
2896 common().set_reservation_size(0);
2897 }
2898
2899 // This overload kicks in when the argument is an rvalue of insertable
2900 // and decomposable type other than init_type.
2901 //
2902 //   flat_hash_map<std::string, int> m;
2903 //   m.insert(std::make_pair("abc", 42));
2904 // TODO(cheshire): A type alias T2 is introduced as a workaround for
2905 // the nvcc issue.
2906 template <class T, RequiresInsertable<T> = 0, class T2 = T,
2907 typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
2908 T* = nullptr>
2909 std::pair<iterator, bool> insert(T&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
2910 return emplace(std::forward<T>(value));
2911 }
2912
2913 // This overload kicks in when the argument is a bitfield or an lvalue
2914 // of insertable and decomposable type.
2915 //
2916 //   union { int n : 1; };
2917 //   flat_hash_set<int> s;
2918 //   s.insert(n);
2919 //
2920 //   flat_hash_set<std::string> s;
2921 //   const char* p = "hello";
2922 //   s.insert(p);
2923
2924 template <
2925 class T, RequiresInsertable<const T&> = 0,
2926 typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
2927 std::pair<iterator, bool> insert(const T& value)
2928 ABSL_ATTRIBUTE_LIFETIME_BOUND {
2929 return emplace(value);
2930 }
2931
2932 // This overload kicks in when the argument is an rvalue of init_type.
2933 // Its purpose is to handle brace-init-list arguments.
2934 //
2935 //   flat_hash_map<std::string, int> s;
2936 //   s.insert({"abc", 42});
2937 std::pair<iterator, bool> insert(init_type&& value)
2938 ABSL_ATTRIBUTE_LIFETIME_BOUND {
2939 return emplace(std::move(value));
2940 }
2941
2942
2943
2944 template <class T, RequiresInsertable<T> = 0, class T2 = T,
2945 typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
2946 T* = nullptr>
2947 iterator insert(const_iterator, T&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
2948 return insert(std::forward<T>(value)).first;
2949 }
2950
2951 template <
2952 class T, RequiresInsertable<const T&> = 0,
2953 typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
2954 iterator insert(const_iterator,
2955 const T& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
2956 return insert(value).first;
2957 }
2958
2959 iterator insert(const_iterator,
2960 init_type&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
2961 return insert(std::move(value)).first;
2962 }
2963
2964 template <class InputIt>
2965 void insert(InputIt first, InputIt last) {
2966 for (; first != last; ++first) emplace(*first);
2967 }
2968
2969 template <class T, RequiresNotInit<T> = 0, RequiresInsertable<const T&> = 0>
2970 void insert(std::initializer_list<T> ilist) {
2971 insert(ilist.begin(), ilist.end());
2972 }
2973
2974 void insert(std::initializer_list<init_type> ilist) {
2975 insert(ilist.begin(), ilist.end());
2976 }
2977
2978 insert_return_type insert(node_type&& node) ABSL_ATTRIBUTE_LIFETIME_BOUND {
2979 if (!node) return {end(), false, node_type()};
2980 const auto& elem = PolicyTraits::element(CommonAccess::GetSlot(node));
2981 auto res = PolicyTraits::apply(
2982 InsertSlot<false>{*this, std::move(*CommonAccess::GetSlot(node))},
2983 elem);
2984 if (res.second) {
2985 CommonAccess::Reset(&node);
2986 return {res.first, true, node_type()};
2987 } else {
2988 return {res.first, false, std::move(node)};
2989 }
2990 }
2991
2992 iterator insert(const_iterator,
2993 node_type&& node) ABSL_ATTRIBUTE_LIFETIME_BOUND {
2994 auto res = insert(std::move(node));
2995 node = std::move(res.node);
2996 return res.position;
2997 }
2998
2999 // This overload kicks in if we can deduce the key from args. This
3000 // enables us to avoid constructing value_type if an entry with the same
3001 // key already exists.
3002 //
3003 // For example:
3004 //
3005 //   flat_hash_map<std::string, std::string> m = {{"abc", "def"}};
3006 //   // Creates no std::string copies and makes no heap allocations.
3007 //   m.emplace("abc", "xyz");
3008 template <class... Args, typename std::enable_if<
3009 IsDecomposable<Args...>::value, int>::type = 0>
3010 std::pair<iterator, bool> emplace(Args&&... args)
3011 ABSL_ATTRIBUTE_LIFETIME_BOUND {
3012 return PolicyTraits::apply(EmplaceDecomposable{*this},
3013 std::forward<Args>(args)...);
3014 }
3015
3016 // This overload kicks in if we cannot deduce the key from args. It
3017 // constructs value_type unconditionally and then either moves it into
3018 // the table or destroys it.
3019 template <class... Args, typename std::enable_if<
3020 !IsDecomposable<Args...>::value, int>::type = 0>
3021 std::pair<iterator, bool> emplace(Args&&... args)
3022 ABSL_ATTRIBUTE_LIFETIME_BOUND {
3023 alignas(slot_type) unsigned char raw[sizeof(slot_type)];
3024 slot_type* slot = to_slot(&raw);
3025
3026 construct(slot, std::forward<Args>(args)...);
3027 const auto& elem = PolicyTraits::element(slot);
3028 return PolicyTraits::apply(InsertSlot<true>{*this, std::move(*slot)}, elem);
3029 }
3030
3031 template <class... Args>
3032 iterator emplace_hint(const_iterator,
3033 Args&&... args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
3034 return emplace(std::forward<Args>(args)...).first;
3035 }
3036
3037 // Extension API: support for lazy emplace.
3038 //
3039 // Looks up key in the table. If found, returns the iterator to the
3040 // element. Otherwise calls `f` with one argument of type
3041 // `raw_hash_set::constructor`, and returns an iterator to the new
3042 // element.
3043 //
3044 // `f` must abide by several restrictions:
3045 //  - it MUST call `raw_hash_set::constructor` with arguments as if a
3046 //    `raw_hash_set::value_type` is constructed,
3047 //  - it MUST NOT access the container before the call to
3048 //    `raw_hash_set::constructor`, and
3049 //  - it MUST NOT erase the lazily emplaced element.
3050 // Doing any of these is undefined behavior.
3051 //
3052 // For example:
3053 //
3054 //   std::unordered_set<absl::string_view> s;
3055 //   // Makes ArenaStr even if "abc" is in the map.
3056 //   s.insert(ArenaString(&arena, "abc"));
3057 //
3058 //   flat_hash_set<ArenaString> s;
3059 //   // Makes ArenaStr only if "abc" is not in the map.
3060 //   s.lazy_emplace("abc", [&](const constructor& ctor) {
3061 //     ctor(&arena, "abc");
3062 //   });
3063 //
3064
3065 class constructor {
3066 friend class raw_hash_set;
3067
3068 public:
3069 template <class... Args>
3070 void operator()(Args&&... args) const {
3071 assert(*slot_);
3072 PolicyTraits::construct(alloc_, *slot_, std::forward<Args>(args)...);
3073 *slot_ = nullptr;
3074 }
3075
3076 private:
3077 constructor(allocator_type* a, slot_type** slot) : alloc_(a), slot_(slot) {}
3078
3079 allocator_type* alloc_;
3080 slot_type** slot_;
3081 };
3082
3083 template <class K = key_type, class F>
3084 iterator lazy_emplace(const key_arg<K>& key,
3085 F&& f) ABSL_ATTRIBUTE_LIFETIME_BOUND {
3086 auto res = find_or_prepare_insert(key);
3087 if (res.second) {
3088 slot_type* slot = res.first.slot();
3089 std::forward<F>(f)(constructor(&alloc_ref(), &slot));
3090 assert(!slot);
3091 }
3092 return res.first;
3093 }
3094
3095 // Extension API: support for heterogeneous keys.
3096 //
3097 //   std::unordered_set<std::string> s;
3098 //   // Turns "abc" into std::string.
3099 //   s.erase("abc");
3100 //
3101 //   flat_hash_set<std::string> s;
3102 //   // Uses "abc" directly without copying it into std::string.
3103 //   s.erase("abc");
3104 template <class K = key_type>
3105 size_type erase(const key_arg<K>& key) {
3106 auto it = find(key);
3107 if (it == end()) return 0;
3108 erase(it);
3109 return 1;
3110 }
3111
3112 // Erases the element pointed to by `it`. Unlike
3113 // `std::unordered_set::erase`, this method returns void to reduce
3114 // algorithmic complexity to O(1). The iterator is invalidated, so any
3115 // increment should be done before calling erase. In order to erase
3116 // while iterating across a map, use the following idiom (which also
3117 // works for some standard containers):
3118 //
3119 // for (auto it = m.begin(), end = m.end(); it != end;) {
3120 //   // `erase()` will invalidate `it`, so advance `it` first.
3121 //   auto copy_it = it++;
3122 //   if (<pred>) m.erase(copy_it);
3123 // }
3124 //
3125 void erase(const_iterator cit) { erase(cit.inner_); }
3126
3127 // This overload is necessary because otherwise erase<K>(const K&) would
3128 // be a better match if a non-const iterator is passed as an argument.
3129 void erase(iterator it) {
3130 AssertIsFull(it.control(), it.generation(), it.generation_ptr(), "erase()");
3131 destroy(it.slot());
3132 if (is_soo()) {
3133 common().set_empty_soo();
3134 } else {
3135 erase_meta_only(it);
3136 }
3137 }
3138
3139 iterator erase(const_iterator first,
3140 const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND {
3141 // We check for empty first because ClearBackingArray requires that
3142 // capacity() > 0 as a precondition.
3143 if (empty()) return end();
3144 if (first == last) return last.inner_;
3145 if (is_soo()) {
3146 destroy(soo_slot());
3147 common().set_empty_soo();
3148 return end();
3149 }
3150 if (first == begin() && last == end()) {
3151 // TODO: we access the control bytes in destroy_slots, so it could make
3152 // sense to combine destroy_slots and ClearBackingArray to avoid cache
3153 // misses when the table is large. Note that clear() does this too.
3154 destroy_slots();
3155 ClearBackingArray(common(), GetPolicyFunctions(), /*reuse=*/true,
3156 SooEnabled());
3157 common().set_reserved_growth(common().reservation_size());
3158 return end();
3159 }
3160 while (first != last) {
3161 erase(first++);
3162 }
3163 return last.inner_;
3164 }
3165
3166 // Moves elements from `src` into `this`. If the element already exists
3167 // in `this`, it is left unmodified in `src`.
3168 template <typename H, typename E>
3169 void merge(raw_hash_set<Policy, H, E, Alloc>& src) {
3170 assert(this != &src);
3171 // Returns whether insertion took place.
3172 const auto insert_slot = [this](slot_type* src_slot) {
3173 return PolicyTraits::apply(InsertSlot<false>{*this, std::move(*src_slot)},
3174 PolicyTraits::element(src_slot))
3175 .second;
3176 };
3177
3178 if (src.is_soo()) {
3179 if (src.empty()) return;
3180 if (insert_slot(src.soo_slot())) src.common().set_empty_soo();
3181 return;
3182 }
3183 for (auto it = src.begin(), e = src.end(); it != e;) {
3184 auto next = std::next(it);
3185 if (insert_slot(it.slot())) src.erase_meta_only(it);
3186 it = next;
3187 }
3188 }
3189
3190 template <typename H, typename E>
3191 void merge(raw_hash_set<Policy, H, E, Alloc>&& src) {
3192 merge(src);
3193 }
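// Illustrative merge() example. Per the contract documented above, elements
// whose keys already exist in the destination stay behind in `src`:
//
//   absl::flat_hash_set<std::string> dst = {"a"};
//   absl::flat_hash_set<std::string> src = {"a", "b"};
//   dst.merge(src);
//   // dst == {"a", "b"} and src == {"a"}: the duplicate was not moved.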
3194
3195 node_type extract(const_iterator position) {
3196 AssertIsFull(position.control(), position.inner_.generation(),
3197 position.inner_.generation_ptr(), "extract()");
3198 auto node = CommonAccess::Transfer<node_type>(alloc_ref(), position.slot());
3199 if (is_soo()) {
3200 common().set_empty_soo();
3201 } else {
3202 erase_meta_only(position);
3203 }
3204 return node;
3205 }
3206
3207 template <
3208 class K = key_type,
3209 typename std::enable_if<!std::is_same<K, iterator>::value, int>::type = 0>
3210 node_type extract(const key_arg<K>& key) {
3211 auto it = find(key);
3212 return it == end() ? node_type() : extract(const_iterator{it});
3213 }
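// Illustrative extract()/insert(node) round trip. The node handle owns the
// slot, so the element moves between tables without being copied:
//
//   absl::flat_hash_set<std::string> a = {"key"};
//   absl::flat_hash_set<std::string> b;
//   auto node = a.extract("key");                   // a no longer owns "key"
//   if (!node.empty()) b.insert(std::move(node));   // b now owns it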
3214
3215 void swap(raw_hash_set& that) noexcept(
3216 IsNoThrowSwappable<hasher>() && IsNoThrowSwappable<key_equal>() &&
3217 IsNoThrowSwappable<allocator_type>(
3218 typename AllocTraits::propagate_on_container_swap{})) {
3219 using std::swap;
3220 swap_common(that);
3221 swap(hash_ref(), that.hash_ref());
3222 swap(eq_ref(), that.eq_ref());
3223 SwapAlloc(alloc_ref(), that.alloc_ref(),
3224 typename AllocTraits::propagate_on_container_swap{});
3225 }
3226
3227 void rehash(size_t n) {
3228 const size_t cap = capacity();
3229 if (n == 0) {
3230 if (cap == 0 || is_soo()) return;
3231 if (empty()) {
3232 ClearBackingArray(common(), GetPolicyFunctions(), /*reuse=*/false,
3233 SooEnabled());
3234 return;
3235 }
3236 if (fits_in_soo(size())) {
3237 // When the table is already sampled, we keep it sampled.
3238 if (infoz().IsSampled()) {
3239 const size_t kInitialSampledCapacity = NextCapacity(SooCapacity());
3240 if (capacity() > kInitialSampledCapacity) {
3241 resize(kInitialSampledCapacity);
3242 }
3243 // This asserts that we didn't lose sampling coverage in `resize`.
3244 assert(infoz().IsSampled());
3245 return;
3246 }
3247 alignas(slot_type) unsigned char slot_space[sizeof(slot_type)];
3248 slot_type* tmp_slot = to_slot(slot_space);
3249 transfer(tmp_slot, begin().slot());
3250 ClearBackingArray(common(), GetPolicyFunctions(), /*reuse=*/false,
3251 SooEnabled());
3252 transfer(soo_slot(), tmp_slot);
3253 common().set_full_soo();
3254 return;
3255 }
3256 }
3257
3258 // bitor is a faster way of doing `max` here. We will round up to the
3259 // next power-of-2-minus-1, so bitor is good enough.
3260 auto m = NormalizeCapacity(n | GrowthToLowerboundCapacity(size()));
3261
3262 if (n == 0 || m > cap) {
3263 resize(m);
3264
3265 // This is after resize, to ensure that we have completed the
3266 // allocation and have potentially sampled the hashtable.
3267 infoz().RecordReservation(n);
3268 }
3269 }
3270
3271 void reserve(size_t n) {
3272 const size_t max_size_before_growth =
3273 is_soo() ? SooCapacity() : size() + growth_left();
3274 if (n > max_size_before_growth) {
3275 size_t m = GrowthToLowerboundCapacity(n);
3276 resize(NormalizeCapacity(m));
3277
3278 // This is after resize, to ensure that we have completed the
3279 // allocation and have potentially sampled the hashtable.
3280 infoz().RecordReservation(n);
3281 }
3282 common().reset_reserved_growth(n);
3283 common().set_reservation_size(n);
3284 }
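// Illustrative reserve() example. Reserving the final size up front means
// the loop below triggers no rehash, so pointers and iterators obtained
// after the reserve stay valid throughout the bulk insertion:
//
//   absl::flat_hash_set<int> s;
//   s.reserve(1000);                               // single allocation
//   for (int i = 0; i < 1000; ++i) s.insert(i);    // no rehash in this loop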
3285
3286 // Extension API: support for heterogeneous keys.
3287 //
3288 //   std::unordered_set<std::string> s;
3289 //   // Turns "abc" into std::string.
3290 //   s.count("abc");
3291 //
3292 //   flat_hash_set<std::string> s;
3293 //   // Uses "abc" directly without copying it into std::string.
3294 //   s.count("abc");
3295 template <class K = key_type>
3296 size_t count(const key_arg<K>& key) const {
3297 return find(key) == end() ? 0 : 1;
3298 }
3299
3300 // Issues CPU prefetch instructions for the memory needed to find or
3301 // insert a key. Like all lookup functions, this supports heterogeneous
3302 // keys.
3303 // NOTE: This is a very low-level operation and should not be used
3304 // without specific benchmarks indicating its importance.
3305 template <class K = key_type>
3306 void prefetch(const key_arg<K>& key) const {
3307 if (SooEnabled() ? is_soo() : capacity() == 0) return;
3308 (void)key;
3309 // Avoid probing if we won't be able to prefetch the addresses received.
3310 #ifdef ABSL_HAVE_PREFETCH
3311 prefetch_heap_block();
3312 auto seq = probe(common(), hash_ref()(key));
3313 PrefetchToLocalCache(control() + seq.offset());
3314 PrefetchToLocalCache(slot_array() + seq.offset());
3315 #endif
3316 }
3317
3318 // The API of find() has two extensions.
3319 //
3320 // 1. The hash can be passed by the user. It must be equal to the hash
3321 // of the key.
3322 //
3323 // 2. The type of the key argument doesn't have to be key_type. This is
3324 // so-called heterogeneous key support.
3325 template <class K = key_type>
3326 iterator find(const key_arg<K>& key,
3327 size_t hash) ABSL_ATTRIBUTE_LIFETIME_BOUND {
3328 AssertHashEqConsistent(key);
3329 if (is_soo()) return find_soo(key);
3330 return find_non_soo(key, hash);
3331 }
3332 template <class K = key_type>
3333 iterator find(const key_arg<K>& key) ABSL_ATTRIBUTE_LIFETIME_BOUND {
3334 AssertHashEqConsistent(key);
3335 if (is_soo()) return find_soo(key);
3336 prefetch_heap_block();
3337 return find_non_soo(key, hash_ref()(key));
3338 }
3339
3340 template <class K = key_type>
3341 const_iterator find(const key_arg<K>& key,
3342 size_t hash) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
3343 return const_cast<raw_hash_set*>(this)->find(key, hash);
3344 }
3345 template <class K = key_type>
3346 const_iterator find(const key_arg<K>& key) const
3347 ABSL_ATTRIBUTE_LIFETIME_BOUND {
3348 return const_cast<raw_hash_set*>(this)->find(key);
3349 }
3350
3351 template <class K = key_type>
3352 bool contains(const key_arg<K>& key) const {
3353 // Here neither the iterator returned by `find()` nor `end()` can be
3354 // invalidated outside of potential thread-safety issues: `find()`'s
3355 // return value is constructed, compared and destroyed entirely within
3356 // this expression.
3357 return !find(key).unchecked_equals(end());
3358 }
3359
3360 template <class K = key_type>
3361 std::pair<iterator, iterator> equal_range(const key_arg<K>& key)
3362 ABSL_ATTRIBUTE_LIFETIME_BOUND {
3363 auto it = find(key);
3364 if (it != end()) return {it, std::next(it)};
3365 return {it, it};
3366 }
3367 template <class K = key_type>
3368 std::pair<const_iterator, const_iterator> equal_range(
3369 const key_arg<K>& key) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
3370 auto it = find(key);
3371 if (it != end()) return {it, std::next(it)};
3372 return {it, it};
3373 }
3374
3375 size_t bucket_count() const { return capacity(); }
3376 float load_factor() const {
3377 return capacity() ? static_cast<float>(size()) / capacity() : 0.0f;
3378 }
3379 float max_load_factor() const { return 1.0f; }
3380 void max_load_factor(float) {
3381 // Does nothing.
3382 }
3383
3384 hasher hash_function() const { return hash_ref(); }
3385 key_equal key_eq() const { return eq_ref(); }
3386 allocator_type get_allocator() const { return alloc_ref(); }
3387
3388 friend bool operator==(const raw_hash_set& a, const raw_hash_set& b) {
3389 if (a.size() != b.size()) return false;
3390 const raw_hash_set* outer = &a;
3391 const raw_hash_set* inner = &b;
3392 if (outer->capacity() > inner->capacity()) std::swap(outer, inner);
3393 for (const value_type& elem : *outer) {
3394 auto it = PolicyTraits::apply(FindElement{*inner}, elem);
3395 if (it == inner->end() || !(*it == elem)) return false;
3396 }
3397 return true;
3398 }
3399
3400 friend bool operator!=(const raw_hash_set& a, const raw_hash_set& b) {
3401 return !(a == b);
3402 }
3403
3404 template <typename H>
3405 friend typename std::enable_if<H::template is_hashable<value_type>::value,
3406 H>::type
3407 AbslHashValue(H h, const raw_hash_set& s) {
3408 return H::combine(H::combine_unordered(std::move(h), s.begin(), s.end()),
3409 s.size());
3410 }
3411
3412 friend void swap(raw_hash_set& a,
3413 raw_hash_set& b) noexcept(noexcept(a.swap(b))) {
3414 a.swap(b);
3415 }
3416
3417 private:
3418 template <class Container, typename Enabler>
3419 friend struct absl::container_internal::hashtable_debug_internal::
3420 HashtableDebugAccess;
3421
3422 friend struct absl::container_internal::HashtableFreeFunctionsAccess;
3423
3424 struct FindElement {
3425 template <class K, class... Args>
3426 const_iterator operator()(const K& key, Args&&...) const {
3427 return s.find(key);
3428 }
3429 const raw_hash_set& s;
3430 };
3431
3432 struct HashElement {
3433 template <class K, class... Args>
3434 size_t operator()(const K& key, Args&&...) const {
3435 return h(key);
3436 }
3437 const hasher& h;
3438 };
3439
3440 template <class K1>
3441 struct EqualElement {
3442 template <class K2, class... Args>
3443 bool operator()(const K2& lhs, Args&&...) const {
3444 return eq(lhs, rhs);
3445 }
3446 const K1& rhs;
3447 const key_equal& eq;
3448 };
3449
3450 struct EmplaceDecomposable {
3451 template <class K, class... Args>
3452 std::pair<iterator, bool> operator()(const K& key, Args&&... args) const {
3453 auto res = s.find_or_prepare_insert(key);
3454 if (res.second) {
3455 s.emplace_at(res.first, std::forward<Args>(args)...);
3456 }
3457 return res;
3458 }
3459 raw_hash_set& s;
3460 };
3461
3462 template <bool do_destroy>
3463 struct InsertSlot {
3464 template <class K, class... Args>
3465 std::pair<iterator, bool> operator()(const K& key, Args&&...) && {
3466 auto res = s.find_or_prepare_insert(key);
3467 if (res.second) {
3468 s.transfer(res.first.slot(), &slot);
3469 } else if (do_destroy) {
3470 s.destroy(&slot);
3471 }
3472 return res;
3473 }
3474 raw_hash_set& s;
3475
3476 slot_type&& slot;
3477 };
3478
3479
3480 template <typename... Args>
3481 inline void construct(slot_type* slot, Args&&... args) {
3482 PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
3483 }
3484 inline void destroy(slot_type* slot) {
3485 PolicyTraits::destroy(&alloc_ref(), slot);
3486 }
3487 inline void transfer(slot_type* to, slot_type* from) {
3488 PolicyTraits::transfer(&alloc_ref(), to, from);
3489 }
3490
3491 // find() specialization for the SOO case: the table holds at most one
3492 // element, stored inline, so lookup is a single comparison.
3493 template <class K = key_type>
3494 iterator find_soo(const key_arg<K>& key) {
3495 assert(is_soo());
3496 return empty() || !PolicyTraits::apply(EqualElement<K>{key, eq_ref()},
3497 PolicyTraits::element(soo_slot()))
3498 ? end()
3499 : soo_iterator();
3500 }
3501
3502 template <class K = key_type>
3503 iterator find_non_soo(const key_arg<K>& key, size_t hash) {
3504 assert(!is_soo());
3505 auto seq = probe(common(), hash);
3506 const ctrl_t* ctrl = control();
3507 while (true) {
3508 Group g{ctrl + seq.offset()};
3509 for (uint32_t i : g.Match(H2(hash))) {
3510 if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
3511 EqualElement<K>{key, eq_ref()},
3512 PolicyTraits::element(slot_array() + seq.offset(i)))))
3513 return iterator_at(seq.offset(i));
3514 }
3515 if (ABSL_PREDICT_TRUE(g.MaskEmpty())) return end();
3516 seq.next();
3517 assert(seq.index() <= capacity() && "full table!");
3518 }
3519 }
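// A worked sketch of the probe loop above. The hash is split into H1 (which
// selects the starting group) and H2 (a 7-bit fingerprint stored in the
// control byte; the low 7 bits of the hash in this implementation):
//
//   const size_t hash = hash_ref()(key);
//   // g.Match(H2(hash))  -> bitmask of slots in the group whose control
//   //                       byte equals the fingerprint; each candidate is
//   //                       verified with eq() since fingerprints collide.
//   // g.MaskEmpty()      -> nonzero means the probe chain ends here: a
//   //                       truly empty slot proves `key` was never
//   //                       displaced past this group.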
3520
3521 // Conditionally samples hashtablez for SOO tables. This should be
3522 // called on insertion into an empty SOO table and in copy construction
3523 // when the size can fit in SOO capacity.
3524 inline HashtablezInfoHandle try_sample_soo() {
3525 assert(is_soo());
3526 if (!ShouldSampleHashtablezInfo<CharAlloc>()) return HashtablezInfoHandle{};
3527 return Sample(sizeof(slot_type), sizeof(key_type), sizeof(value_type),
3528 SooCapacity());
3529 }
3530
3531 inline void destroy_slots() {
3532 assert(!is_soo());
3533 if (PolicyTraits::template destroy_is_trivial<Alloc>()) return;
3534 IterateOverFullSlots(
3535 common(), slot_array(),
3536 [&](const ctrl_t*, slot_type* slot)
3537 ABSL_ATTRIBUTE_ALWAYS_INLINE { this->destroy(slot); });
3538 }
3539
3540 inline void dealloc() {
3541 assert(capacity() != 0);
3542 // Unpoison before returning the memory to the allocator.
3543 SanitizerUnpoisonMemoryRegion(slot_array(), sizeof(slot_type) * capacity());
3544 infoz().Unregister();
3545 Deallocate<BackingArrayAlignment(alignof(slot_type))>(
3546 &alloc_ref(), common().backing_array_start(),
3547 common().alloc_size(sizeof(slot_type), alignof(slot_type)));
3548 }
3549
3550 inline void destructor_impl() {
3551 if (capacity() == 0) return;
3552 if (is_soo()) {
3553 if (!empty()) {
3554 ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(destroy(soo_slot()));
3555 }
3556 return;
3557 }
3558 destroy_slots();
3559 dealloc();
3560 }
3561
3562 // Erases, but does not destroy, the value pointed to by `it`.
3563 //
3564 // This merely updates the pertinent control byte. This can be used in
3565 // conjunction with Policy::transfer to move the object to another place.
3566 void erase_meta_only(const_iterator it) {
3567 assert(!is_soo());
3568 EraseMetaOnly(common(), static_cast<size_t>(it.control() - control()),
3569 sizeof(slot_type));
3570 }
3571
3572 size_t hash_of(slot_type* slot) const {
3573 return PolicyTraits::apply(HashElement{hash_ref()},
3574 PolicyTraits::element(slot));
3575 }
3576
3577 // Resizes the table to the new capacity and moves all elements to the
3578 // new positions accordingly.
3579 //
3580 // Note that for better performance, instead of
3581 // find_first_non_full(common(), hash),
3582 // HashSetResizeHelper::FindFirstNonFullAfterResize(
3583 //     common(), old_capacity, hash)
3584 // can be called right after `resize`.
3585 void resize(size_t new_capacity) {
3586 raw_hash_set::resize_impl(common(), new_capacity, HashtablezInfoHandle{});
3587 }
3588
3589
3590 // Resizes a full SOO table to the next heap capacity so that the
3591 // pre-sampled `forced_infoz` handle can be attached to the table.
3592 void resize_with_soo_infoz(HashtablezInfoHandle forced_infoz) {
3593 assert(forced_infoz.IsSampled());
3594 raw_hash_set::resize_impl(common(), NextCapacity(SooCapacity()),
3595 forced_infoz);
3596 }
3597
3598 // Resizes the set to the new capacity. This is a static function in
3599 // order to use its pointer in GetPolicyFunctions.
3600 ABSL_ATTRIBUTE_NOINLINE static void resize_impl(
3601 CommonFields& common, size_t new_capacity,
3602 HashtablezInfoHandle forced_infoz) {
3603 raw_hash_set* set = reinterpret_cast<raw_hash_set*>(&common);
3604 assert(IsValidCapacity(new_capacity));
3605 assert(!set->fits_in_soo(new_capacity));
3606 const bool was_soo = set->is_soo();
3607 const bool had_soo_slot = was_soo && !set->empty();
3608 const ctrl_t soo_slot_h2 =
3609 had_soo_slot ? static_cast<ctrl_t>(H2(set->hash_of(set->soo_slot())))
3610 : ctrl_t::kEmpty;
3611 HashSetResizeHelper resize_helper(common, was_soo, had_soo_slot,
3612 forced_infoz);
3613 // Initialize HashSetResizeHelper::old_heap_or_soo_. We can't do this
3614 // in the HashSetResizeHelper constructor because the constructor can't
3615 // transfer slots when transfer_uses_memcpy is false.
3616 // TODO: try to handle more of the SOO cases inside
3617 // InitializeSlots.
3618 if (PolicyTraits::transfer_uses_memcpy() || !had_soo_slot) {
3619 resize_helper.old_heap_or_soo() = common.heap_or_soo();
3620 } else {
3621 set->transfer(set->to_slot(resize_helper.old_soo_data()),
3622 set->soo_slot());
3623 }
3624 common.set_capacity(new_capacity);
3625
3626 // Note that `InitializeSlots` does a different number of initialization
3627 // steps depending on `transfer_uses_memcpy` and the capacities.
3628 const bool grow_single_group =
3629 resize_helper.InitializeSlots<CharAlloc, sizeof(slot_type),
3630 PolicyTraits::transfer_uses_memcpy(),
3631 SooEnabled(), alignof(slot_type)>(
3632 common, CharAlloc(set->alloc_ref()), soo_slot_h2, sizeof(key_type),
3633 sizeof(value_type));
3634
3635 // In the SooEnabled() case, the capacity is never 0, so we don't check.
3636 if (!SooEnabled() && resize_helper.old_capacity() == 0) {
3637 // InitializeSlots did all the work, including infoz().RecordRehash().
3638 return;
3639 }
3640 assert(resize_helper.old_capacity() > 0);
3641 // Nothing more to do in this case.
3642 if (was_soo && !had_soo_slot) return;
3643
3644 slot_type* new_slots = set->slot_array();
3645 if (grow_single_group) {
3646 if (PolicyTraits::transfer_uses_memcpy()) {
3647 // InitializeSlots did all the work.
3648 return;
3649 }
3650 if (was_soo) {
3651 set->transfer(new_slots + resize_helper.SooSlotIndex(),
3652 to_slot(resize_helper.old_soo_data()));
3653 return;
3654 } else {
3655 // We want GrowSizeIntoSingleGroup to be called here so that
3656 // InitializeSlots does not have to depend on PolicyTraits.
3657 resize_helper.GrowSizeIntoSingleGroup<PolicyTraits>(common,
3658 set->alloc_ref());
3659 }
3660 } else {
3661 // General case: reinsert each old element by probing for its new position.
3662 const auto insert_slot = [&](slot_type* slot) {
3663 size_t hash = PolicyTraits::apply(HashElement{set->hash_ref()},
3664 PolicyTraits::element(slot));
3665 auto target = find_first_non_full(common, hash);
3666 SetCtrl(common, target.offset, H2(hash), sizeof(slot_type));
3667 set->transfer(new_slots + target.offset, slot);
3668 return target.probe_length;
3669 };
3670 if (was_soo) {
3671 insert_slot(to_slot(resize_helper.old_soo_data()));
3672 return;
3673 } else {
3674 auto* old_slots = static_cast<slot_type*>(resize_helper.old_slots());
3675 size_t total_probe_length = 0;
3676 for (size_t i = 0; i != resize_helper.old_capacity(); ++i) {
3677 if (IsFull(resize_helper.old_ctrl()[i])) {
3678 total_probe_length += insert_slot(old_slots + i);
3679 }
3680 }
3681 common.infoz().RecordRehash(total_probe_length);
3682 }
3683 }
3684 resize_helper.DeallocateOld<alignof(slot_type)>(CharAlloc(set->alloc_ref()),
3685 sizeof(slot_type));
3686 }
3687
3688
3689
3690 static slot_type* to_slot(void* buf) { return static_cast<slot_type*>(buf); }
3691
3692 // Requires that lhs does not have a full SOO slot.
3693 static void move_common(bool that_is_full_soo, allocator_type& rhs_alloc,
3694 CommonFields& lhs, CommonFields&& rhs) {
3695 if (PolicyTraits::transfer_uses_memcpy() || !that_is_full_soo) {
3696 lhs = std::move(rhs);
3697 } else {
3698 lhs.move_non_heap_or_soo_fields(rhs);
3699
3700 PolicyTraits::transfer(&rhs_alloc, to_slot(lhs.soo_data()),
3701 to_slot(rhs.soo_data()));
3702 }
3703 }
3704
3705 // Swaps the common fields, taking care not to memcpy SOO slots of
3706 // types that are not trivially relocatable.
3707 void swap_common(raw_hash_set& that) {
3708 using std::swap;
3709 if (PolicyTraits::transfer_uses_memcpy()) {
3710 swap(common(), that.common());
3711 return;
3712 }
3713 CommonFields tmp = CommonFields::CreateDefault<SooEnabled()>();
3714 const bool that_is_full_soo = that.is_full_soo();
3715 move_common(that_is_full_soo, that.alloc_ref(), tmp,
3716 std::move(that.common()));
3717 move_common(is_full_soo(), alloc_ref(), that.common(), std::move(common()));
3718 move_common(that_is_full_soo, that.alloc_ref(), common(), std::move(tmp));
3719 }
3720
3721 void maybe_increment_generation_or_rehash_on_move() {
3722 if (!SwisstableGenerationsEnabled() || capacity() == 0 || is_soo()) {
3723 return;
3724 }
3725 common().increment_generation();
3726 if (!empty() && common().should_rehash_for_bug_detection_on_move()) {
3727 resize(capacity());
3728 }
3729 }
3730
3731 template <bool propagate_alloc>
3732 raw_hash_set& assign_impl(raw_hash_set&& that) {
3733 // We don't bother checking for this/that aliasing. We just need to
3734 // avoid breaking the invariants in that case.
3735 destructor_impl();
3736 move_common(that.is_full_soo(), that.alloc_ref(), common(),
3737 std::move(that.common()));
3738 // TODO: move instead of copying hash/eq/alloc.
3739 hash_ref() = that.hash_ref();
3740 eq_ref() = that.eq_ref();
3741 CopyAlloc(alloc_ref(), that.alloc_ref(),
3742 std::integral_constant<bool, propagate_alloc>());
3743 that.common() = CommonFields::CreateDefault<SooEnabled()>();
3744 maybe_increment_generation_or_rehash_on_move();
3745 return *this;
3746 }
3747
3748 raw_hash_set& move_elements_allocs_unequal(raw_hash_set&& that) {
3749 const size_t size = that.size();
3750 if (size == 0) return *this;
3751 reserve(size);
3752 for (iterator it = that.begin(); it != that.end(); ++it) {
3753 insert(std::move(PolicyTraits::element(it.slot())));
3754 that.destroy(it.slot());
3755 }
3756 if (!that.is_soo()) that.dealloc();
3757 that.common() = CommonFields::CreateDefault<SooEnabled()>();
3758 maybe_increment_generation_or_rehash_on_move();
3759 return *this;
3760 }
3761
3762 raw_hash_set& move_assign(raw_hash_set&& that,
3763 std::true_type /*propagate_alloc*/) {
3764 return assign_impl<true>(std::move(that));
3765 }
3766 raw_hash_set& move_assign(raw_hash_set&& that,
3767 std::false_type /*propagate_alloc*/) {
3768 if (alloc_ref() == that.alloc_ref()) {
3769 return assign_impl<false>(std::move(that));
3770 }
3771 // Aliasing can't happen here: unequal allocators were handled above.
3772 assert(this != &that);
3773 destructor_impl();
3774
3775 // We can't take over that's memory, so we need to move each element.
3776 // While moving elements, this should have that's hash/eq, so copy
3777 // hash/eq before moving elements.
3778 hash_ref() = that.hash_ref();
3779 eq_ref() = that.eq_ref();
3780 return move_elements_allocs_unequal(std::move(that));
3781 }
3782
3783 template <class K>
3784 std::pair<iterator, bool> find_or_prepare_insert_soo(const K& key) {
3785 if (empty()) {
3786 const HashtablezInfoHandle infoz = try_sample_soo();
3787 if (infoz.IsSampled()) {
3788 resize_with_soo_infoz(infoz);
3789 } else {
3790 common().set_full_soo();
3791 return {soo_iterator(), true};
3792 }
3793 } else if (PolicyTraits::apply(EqualElement<K>{key, eq_ref()},
3794 PolicyTraits::element(soo_slot()))) {
3795 return {soo_iterator(), false};
3796 } else {
3797 resize(NextCapacity(SooCapacity()));
3798 }
3799 const size_t index =
3800 PrepareInsertAfterSoo(hash_ref()(key), sizeof(slot_type), common());
3801 return {iterator_at(index), true};
3802 }
3803
3804 template <class K>
3805 std::pair<iterator, bool> find_or_prepare_insert_non_soo(const K& key) {
3806 assert(!is_soo());
3807 prefetch_heap_block();
3808 auto hash = hash_ref()(key);
3809 auto seq = probe(common(), hash);
3810 const ctrl_t* ctrl = control();
3811 while (true) {
3812 Group g{ctrl + seq.offset()};
3813 for (uint32_t i : g.Match(H2(hash))) {
3814 if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
3815 EqualElement<K>{key, eq_ref()},
3816 PolicyTraits::element(slot_array() + seq.offset(i)))))
3817 return {iterator_at(seq.offset(i)), false};
3818 }
3819 auto mask_empty = g.MaskEmpty();
3820 if (ABSL_PREDICT_TRUE(mask_empty)) {
3821 size_t target = seq.offset(
3822 GetInsertionOffset(mask_empty, capacity(), hash, control()));
3823 return {iterator_at(PrepareInsertNonSoo(common(), hash,
3824 FindInfo{target, seq.index()},
3825 GetPolicyFunctions())),
3826 true};
3827 }
3828 seq.next();
3829 assert(seq.index() <= capacity() && "full table!");
3830 }
3831 }
3832
3833 protected:
3834 // Asserts that the hash and equality functors provided by the user are
3835 // consistent, meaning that `eq(k1, k2)` implies `hash(k1) == hash(k2)`.
3836 template <class K>
3837 void AssertHashEqConsistent(ABSL_ATTRIBUTE_UNUSED const K& key) {
3838 #ifndef NDEBUG
3839 if (empty()) return;
3840
3841 const size_t hash_of_arg = hash_ref()(key);
3842 const auto assert_consistent = [&](const ctrl_t*, slot_type* slot) {
3843 const value_type& element = PolicyTraits::element(slot);
3844 const bool is_key_equal =
3845 PolicyTraits::apply(EqualElement<K>{key, eq_ref()}, element);
3846 if (!is_key_equal) return;
3847
3848 const size_t hash_of_slot =
3849 PolicyTraits::apply(HashElement{hash_ref()}, element);
3850 const bool is_hash_equal = hash_of_arg == hash_of_slot;
3851 if (!is_hash_equal) {
3852 // In this case, we're going to crash. Do a couple of other checks for
3853 // idempotence issues. Recalculating hash/eq here is also convenient
3854 // for debugging with gdb/lldb.
3855 const size_t once_more_hash_arg = hash_ref()(key);
3856 assert(hash_of_arg == once_more_hash_arg && "hash is not idempotent.");
3857 const size_t once_more_hash_slot =
3858 PolicyTraits::apply(HashElement{hash_ref()}, element);
3859 assert(hash_of_slot == once_more_hash_slot &&
3860 "hash is not idempotent.");
3861 const bool once_more_eq =
3862 PolicyTraits::apply(EqualElement<K>{key, eq_ref()}, element);
3863 assert(is_key_equal == once_more_eq && "equality is not idempotent.");
3864 }
3865 assert((!is_key_equal || is_hash_equal) &&
3866 "eq(k1, k2) must imply that hash(k1) == hash(k2). "
3867 "hash/eq functors are inconsistent.");
3868 };
3869
3870 if (is_soo()) {
3871 assert_consistent(/*ctrl=*/nullptr, soo_slot());
3872 return;
3873 }
3874 // We only do validation for small tables so that it's constant time.
3875 if (capacity() > 16) return;
3876 IterateOverFullSlots(common(), slot_array(), assert_consistent);
3877 #endif
3878 }
3879
3880 // Attempts to find `key` in the table; if it isn't found, prepares a
3881 // slot where the value can be inserted, with the control byte already
3882 // set to `key`'s H2. The bool is true iff an insertion was prepared.
3883 template <class K>
3884 std::pair<iterator, bool> find_or_prepare_insert(const K& key) {
3885 AssertHashEqConsistent(key);
3886 if (is_soo()) return find_or_prepare_insert_soo(key);
3887 return find_or_prepare_insert_non_soo(key);
3888 }
3889
3890 // Constructs the value in the space pointed to by the iterator. This
3891 // only works after an unsuccessful find_or_prepare_insert() and before
3892 // any other modifications happen in the raw_hash_set.
3893 //
3894 // PRECONDITION: iter was returned from find_or_prepare_insert(k), where
3895 // k is the key decomposed from `forward<Args>(args)...`, and the bool
3896 // returned by find_or_prepare_insert(k) was true.
3897 // POSTCONDITION: *m.iterator_at(i) == value_type(forward<Args>(args)...).
3898 template <class... Args>
3899 void emplace_at(iterator iter, Args&&... args) {
3900 construct(iter.slot(), std::forward<Args>(args)...);
3901
3902 assert(PolicyTraits::apply(FindElement{*this}, *iter) == iter &&
3903 "constructed value does not match the lookup key");
3904 }
3905
3906 iterator iterator_at(size_t i) ABSL_ATTRIBUTE_LIFETIME_BOUND {
3907 return {control() + i, slot_array() + i, common().generation_ptr()};
3908 }
3909 const_iterator iterator_at(size_t i) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
3910 return const_cast<raw_hash_set*>(this)->iterator_at(i);
3911 }
3912
3913 reference unchecked_deref(iterator it) { return it.unchecked_deref(); }
3914
3915 private:
3916 friend struct RawHashSetTestOnlyAccess;
3917
3918 // The number of slots we can still fill without needing to rehash.
3919 //
3920 // This is stored separately due to tombstones: we do not include
3921 // tombstones in the growth capacity because we'd like to rehash when
3922 // the table is filled with tombstones: otherwise, probe sequences might
3923 // get unacceptably long without triggering a rehash.
3924 // Callers can also force a rehash via the standard `rehash(0)`, which
3925 // will recompute this value as a side effect.
3926 // See `CapacityToGrowth()`.
3927
3928 size_t growth_left() const {
3929 assert(!is_soo());
3930 return common().growth_left();
3931 }
3932
3933 GrowthInfo& growth_info() {
3934 assert(!is_soo());
3935 return common().growth_info();
3936 }
3937 GrowthInfo growth_info() const {
3938 assert(!is_soo());
3939 return common().growth_info();
3940 }
3941
3942 // Prefetch the heap-allocated memory region to resolve potential TLB
3943 // and cache misses. This is intended to overlap with the execution of
3944 // calculating the hash for a key.
3945 void prefetch_heap_block() const {
3946 assert(!is_soo());
3947 #if ABSL_HAVE_BUILTIN(__builtin_prefetch) || defined(__GNUC__)
3948 __builtin_prefetch(control(), 0, 1);
3949 #endif
3950 }
3951
3952 CommonFields& common() { return settings_.template get<0>(); }
3953 const CommonFields& common() const { return settings_.template get<0>(); }
3954
3955 ctrl_t* control() const {
3956 assert(!is_soo());
3957 return common().control();
3958 }
3959 slot_type* slot_array() const {
3960 assert(!is_soo());
3961 return static_cast<slot_type*>(common().slot_array());
3962 }
3963 slot_type* soo_slot() {
3964 assert(is_soo());
3965 return static_cast<slot_type*>(common().soo_data());
3966 }
3967 const slot_type* soo_slot() const {
3968 return const_cast<raw_hash_set*>(this)->soo_slot();
3969 }
3970 iterator soo_iterator() {
3971 return {SooControl(), soo_slot(), common().generation_ptr()};
3972 }
3973 const_iterator soo_iterator() const {
3974 return const_cast<raw_hash_set*>(this)->soo_iterator();
3975 }
3976 HashtablezInfoHandle infoz() {
3977 assert(!is_soo());
3978 return common().infoz();
3979 }
3980
3981 hasher& hash_ref() { return settings_.template get<1>(); }
3982 const hasher& hash_ref() const { return settings_.template get<1>(); }
3983 key_equal& eq_ref() { return settings_.template get<2>(); }
3984 const key_equal& eq_ref() const { return settings_.template get<2>(); }
3985 allocator_type& alloc_ref() { return settings_.template get<3>(); }
3986 const allocator_type& alloc_ref() const {
3987 return settings_.template get<3>();
3988 }
3989
3990 static const void* get_hash_ref_fn(const CommonFields& common) {
3991 auto* h = reinterpret_cast<const raw_hash_set*>(&common);
3992 return &h->hash_ref();
3993 }
3994 static void transfer_slot_fn(void* set, void* dst, void* src) {
3995 auto* h = static_cast<raw_hash_set*>(set);
3996 h->transfer(static_cast<slot_type*>(dst), static_cast<slot_type*>(src));
3997 }
3998
3999 static void dealloc_fn(CommonFields& common, const PolicyFunctions&) {
4000 auto* set = reinterpret_cast<raw_hash_set*>(&common);
4001
4002 // Unpoison before returning the memory to the allocator.
4003 SanitizerUnpoisonMemoryRegion(common.slot_array(),
4004 sizeof(slot_type) * common.capacity());
4005
4006 common.infoz().Unregister();
4007 Deallocate<BackingArrayAlignment(alignof(slot_type))>(
4008 &set->alloc_ref(), common.backing_array_start(),
4009 common.alloc_size(sizeof(slot_type), alignof(slot_type)));
4010 }
4011
4012 static const PolicyFunctions& GetPolicyFunctions() {
4013 static constexpr PolicyFunctions value = {
4014 sizeof(slot_type),
4015
4016
4017 std::is_empty<hasher>::value ? &GetHashRefForEmptyHasher
4018 : &raw_hash_set::get_hash_ref_fn,
4019 PolicyTraits::template get_hash_slot_fn<hasher>(),
4020 PolicyTraits::transfer_uses_memcpy()
4021 ? TransferRelocatable<sizeof(slot_type)>
4022 : &raw_hash_set::transfer_slot_fn,
4023 (std::is_same<SlotAlloc, std::allocator<slot_type>>::value
4024 ? &DeallocateStandard<alignof(slot_type)>
4025 : &raw_hash_set::dealloc_fn),
4026 &raw_hash_set::resize_impl,
4027 };
4028 return value;
4029 }
4030
4031 // Bundle together CommonFields plus other objects which might be empty.
4032 // CompressedTuple will ensure that sizeof is not affected by any of the
4033 // empty fields that occur after CommonFields.
4034 absl::container_internal::CompressedTuple<CommonFields, hasher, key_equal,
4035 allocator_type>
4036 settings_{CommonFields::CreateDefault<SooEnabled()>(), hasher{},
4037 key_equal{}, allocator_type{}};
4038 };
4039
4040 // Friend access for free functions in raw_hash_set.h.
4041 struct HashtableFreeFunctionsAccess {
4042 template <class Predicate, typename Set>
4043 static typename Set::size_type EraseIf(Predicate& pred, Set* c) {
4044 if (c->empty()) {
4045 return 0;
4046 }
4047 if (c->is_soo()) {
4048 auto it = c->soo_iterator();
4049 if (!pred(*it)) {
4050 assert(c->size() == 1 && "hash table was modified unexpectedly");
4051 return 0;
4052 }
4053 c->destroy(it.slot());
4054 c->common().set_empty_soo();
4055 return 1;
4056 }
4057 ABSL_ATTRIBUTE_UNUSED const size_t original_size_for_assert = c->size();
4058 size_t num_deleted = 0;
4059 IterateOverFullSlots(
4060 c->common(), c->slot_array(), [&](const ctrl_t* ctrl, auto* slot) {
4061 if (pred(Set::PolicyTraits::element(slot))) {
4062 c->destroy(slot);
4063 EraseMetaOnly(c->common(), static_cast<size_t>(ctrl - c->control()),
4064 sizeof(*slot));
4065 ++num_deleted;
4066 }
4067 });
4068 // NOTE: IterateOverFullSlots allows removal of the current element, so
4069 // we additionally verify the size here.
4070 assert(original_size_for_assert - num_deleted == c->size() &&
4071 "hash table was modified unexpectedly");
4072 return num_deleted;
4073 }
4074
4075 template <class Callback, typename Set>
4076 static void ForEach(Callback& cb, Set* c) {
4077 if (c->empty()) {
4078 return;
4079 }
4080 if (c->is_soo()) {
4081 cb(*c->soo_iterator());
4082 return;
4083 }
4084 using ElementTypeWithConstness = decltype(*c->begin());
4085 IterateOverFullSlots(
4086 c->common(), c->slot_array(), [&cb](const ctrl_t*, auto* slot) {
4087 ElementTypeWithConstness& element = Set::PolicyTraits::element(slot);
4088 cb(element);
4089 });
4090 }
4091 };
4092
4093 // Erases all elements that satisfy the predicate `pred` from the container `c`.
4094 template <typename P, typename H, typename E, typename A, typename Predicate>
4095 typename raw_hash_set<P, H, E, A>::size_type EraseIf(
4096 Predicate& pred, raw_hash_set<P, H, E, A>* c) {
4097 return HashtableFreeFunctionsAccess::EraseIf(pred, c);
4098 }
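// Illustrative use of EraseIf through the public wrappers: absl::erase_if
// for the swisstable containers forwards here.
//
//   absl::flat_hash_set<int> s = {1, 2, 3, 4};
//   size_t n = absl::erase_if(s, [](int v) { return v % 2 == 0; });
//   // n == 2 and s == {1, 3}.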
4099
4100 // Calls `cb` for all elements in the container `c`.
4101 template <typename P, typename H, typename E, typename A, typename Callback>
4102 void ForEach(Callback& cb, raw_hash_set<P, H, E, A>* c) {
4103 return HashtableFreeFunctionsAccess::ForEach(cb, c);
4104 }
4105 template <typename P, typename H, typename E, typename A, typename Callback>
4106 void ForEach(Callback& cb, const raw_hash_set<P, H, E, A>* c) {
4107 return HashtableFreeFunctionsAccess::ForEach(cb, c);
4108 }
4109
4110 namespace hashtable_debug_internal {
4111 template <typename Set>
4112 struct HashtableDebugAccess<Set, absl::void_t<typename Set::raw_hash_set>> {
4113 using Traits = typename Set::PolicyTraits;
4114 using Slot = typename Traits::slot_type;
4115
4116 static size_t GetNumProbes(const Set& set,
4117 const typename Set::key_type& key) {
4118 if (set.is_soo()) return 0;
4119 size_t num_probes = 0;
4120 size_t hash = set.hash_ref()(key);
4121 auto seq = probe(set.common(), hash);
4122 const ctrl_t* ctrl = set.control();
4123 while (true) {
4124 container_internal::Group g{ctrl + seq.offset()};
4125 for (uint32_t i : g.Match(container_internal::H2(hash))) {
4126 if (Traits::apply(
4127 typename Set::template EqualElement<typename Set::key_type>{
4128 key, set.eq_ref()},
4129 Traits::element(set.slot_array() + seq.offset(i))))
4130 return num_probes;
4131 ++num_probes;
4132 }
4133 if (g.MaskEmpty()) return num_probes;
4134 seq.next();
4135 ++num_probes;
4136 }
4137 }
4138
4139 static size_t AllocatedByteSize(const Set& c) {
4140 size_t capacity = c.capacity();
4141 if (capacity == 0) return 0;
4142 size_t m =
4143 c.is_soo() ? 0 : c.common().alloc_size(sizeof(Slot), alignof(Slot));
4144
4145 size_t per_slot = Traits::space_used(static_cast<const Slot*>(nullptr));
4146 if (per_slot != ~size_t{}) {
4147 m += per_slot * c.size();
4148 } else {
4149 for (auto it = c.begin(); it != c.end(); ++it) {
4150 m += Traits::space_used(it.slot());
4151 }
4152 }
4153 return m;
4154 }
4155 };
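// Illustrative use of the debug hooks above, via the test-only helper in
// absl/container/internal/hashtable_debug.h (assumed here; intended for
// benchmarks and tests, not production code):
//
//   absl::flat_hash_set<int> s = {1, 2, 3};
//   size_t probes =
//       absl::container_internal::GetHashtableDebugNumProbes(s, /*key=*/2);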
4156
4157 }  // namespace hashtable_debug_internal
4158 }  // namespace container_internal
4159 ABSL_NAMESPACE_END
4160 }  // namespace absl
4161
4162 #undef ABSL_SWISSTABLE_ENABLE_GENERATIONS
4163 #undef ABSL_SWISSTABLE_IGNORE_UNINITIALIZED
4164 #undef ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN
4165
4166 #endif