@@ -740,14 +740,19 @@ class CommonFields : public HashtablezInfoHandle {
740
740
control_(that.control_),
741
741
slots_(that.slots_),
742
742
size_(that.size_),
743
- capacity_(that.capacity_) {
743
+ capacity_(that.capacity_),
744
+ growth_left_(that.growth_left_) {
744
745
that.control_ = EmptyGroup ();
745
746
that.slots_ = nullptr ;
746
747
that.size_ = 0 ;
747
748
that.capacity_ = 0 ;
749
+ that.growth_left_ = 0 ;
748
750
}
749
751
CommonFields& operator =(CommonFields&&) = default ;
750
752
753
+ HashtablezInfoHandle& infoz () { return *this ; }
754
+ const HashtablezInfoHandle& infoz () const { return *this ; }
755
+
751
756
// TODO(b/259599413): Investigate removing some of these fields:
752
757
// - control/slots can be derived from each other
753
758
// - size can be moved into the slot array
@@ -768,8 +773,8 @@ class CommonFields : public HashtablezInfoHandle {
768
773
// The total number of available slots.
769
774
size_t capacity_ = 0 ;
770
775
771
- HashtablezInfoHandle& infoz () { return * this ; }
772
- const HashtablezInfoHandle& infoz () const { return * this ; }
776
+ // The number of slots we can still fill without needing to rehash.
777
+ size_t growth_left_ = 0 ;
773
778
};
774
779
775
780
// Returns the number of "cloned control bytes".
@@ -968,21 +973,20 @@ extern template FindInfo find_first_non_full(const CommonFields&, size_t);
968
973
// performance critical routines.
969
974
FindInfo find_first_non_full_outofline (const CommonFields&, size_t );
970
975
971
- inline void ResetGrowthLeft (CommonFields& common, size_t & growth_left ) {
972
- growth_left = CapacityToGrowth (common.capacity_ ) - common.size_ ;
976
+ inline void ResetGrowthLeft (CommonFields& common) {
977
+ common. growth_left_ = CapacityToGrowth (common.capacity_ ) - common.size_ ;
973
978
}
974
979
975
980
// Sets `ctrl` to `{kEmpty, kSentinel, ..., kEmpty}`, marking the entire
976
981
// array as marked as empty.
977
- inline void ResetCtrl (CommonFields& common, size_t & growth_left,
978
- size_t slot_size) {
982
+ inline void ResetCtrl (CommonFields& common, size_t slot_size) {
979
983
const size_t capacity = common.capacity_ ;
980
984
ctrl_t * ctrl = common.control_ ;
981
985
std::memset (ctrl, static_cast <int8_t >(ctrl_t ::kEmpty ),
982
986
capacity + 1 + NumClonedBytes ());
983
987
ctrl[capacity] = ctrl_t ::kSentinel ;
984
988
SanitizerPoisonMemoryRegion (common.slots_ , slot_size * capacity);
985
- ResetGrowthLeft (common, growth_left );
989
+ ResetGrowthLeft (common);
986
990
}
987
991
988
992
// Sets `ctrl[i]` to `h`.
@@ -1027,8 +1031,7 @@ inline size_t AllocSize(size_t capacity, size_t slot_size, size_t slot_align) {
1027
1031
}
1028
1032
1029
1033
template <typename Alloc, size_t SizeOfSlot, size_t AlignOfSlot>
1030
- ABSL_ATTRIBUTE_NOINLINE void InitializeSlots (CommonFields& c,
1031
- size_t & growth_left, Alloc alloc) {
1034
+ ABSL_ATTRIBUTE_NOINLINE void InitializeSlots (CommonFields& c, Alloc alloc) {
1032
1035
assert (c.capacity_ );
1033
1036
// Folks with custom allocators often make unwarranted assumptions about the
1034
1037
// behavior of their classes vis-a-vis trivial destructability and what
@@ -1045,7 +1048,7 @@ ABSL_ATTRIBUTE_NOINLINE void InitializeSlots(CommonFields& c,
1045
1048
Allocate<AlignOfSlot>(&alloc, AllocSize (cap, SizeOfSlot, AlignOfSlot)));
1046
1049
c.control_ = reinterpret_cast <ctrl_t *>(mem);
1047
1050
c.slots_ = mem + SlotOffset (cap, AlignOfSlot);
1048
- ResetCtrl (c, growth_left, SizeOfSlot);
1051
+ ResetCtrl (c, SizeOfSlot);
1049
1052
if (sample_size) {
1050
1053
c.infoz () = Sample (sample_size);
1051
1054
}
@@ -1073,12 +1076,11 @@ struct PolicyFunctions {
1073
1076
// ClearBackingArray clears the backing array, either modifying it in place,
1074
1077
// or creating a new one based on the value of "reuse".
1075
1078
// REQUIRES: c.capacity > 0
1076
- void ClearBackingArray (CommonFields& c, size_t & growth_left ,
1077
- const PolicyFunctions& policy, bool reuse);
1079
+ void ClearBackingArray (CommonFields& c, const PolicyFunctions& policy ,
1080
+ bool reuse);
1078
1081
1079
1082
// Type-erased version of raw_hash_set::erase_meta_only.
1080
- void EraseMetaOnly (CommonFields& c, size_t & growth_left, ctrl_t * it,
1081
- size_t slot_size);
1083
+ void EraseMetaOnly (CommonFields& c, ctrl_t * it, size_t slot_size);
1082
1084
1083
1085
// Function to place in PolicyFunctions::dealloc for raw_hash_sets
1084
1086
// that are using std::allocator. This allows us to share the same
@@ -1106,7 +1108,7 @@ ABSL_ATTRIBUTE_NOINLINE void TransferRelocatable(void*, void* dst, void* src) {
1106
1108
}
1107
1109
1108
1110
// Type-erased version of raw_hash_set::drop_deletes_without_resize.
1109
- void DropDeletesWithoutResize (CommonFields& common, size_t & growth_left,
1111
+ void DropDeletesWithoutResize (CommonFields& common,
1110
1112
const PolicyFunctions& policy, void * tmp_space);
1111
1113
1112
1114
// A SwissTable.
@@ -1130,7 +1132,7 @@ void DropDeletesWithoutResize(CommonFields& common, size_t& growth_left,
1130
1132
// the storage of the hashtable will be allocated and the elements will be
1131
1133
// constructed and destroyed.
1132
1134
template <class Policy , class Hash , class Eq , class Alloc >
1133
- class raw_hash_set : private CommonFields {
1135
+ class raw_hash_set {
1134
1136
using PolicyTraits = hash_policy_traits<Policy>;
1135
1137
using KeyArgImpl =
1136
1138
KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;
@@ -1337,7 +1339,7 @@ class raw_hash_set : private CommonFields {
1337
1339
size_t bucket_count, const hasher& hash = hasher(),
1338
1340
const key_equal& eq = key_equal(),
1339
1341
const allocator_type& alloc = allocator_type())
1340
- : settings_(0u , hash, eq, alloc) {
1342
+ : settings_(CommonFields{} , hash, eq, alloc) {
1341
1343
if (bucket_count) {
1342
1344
common ().capacity_ = NormalizeCapacity (bucket_count);
1343
1345
initialize_slots ();
@@ -1462,15 +1464,13 @@ class raw_hash_set : private CommonFields {
1462
1464
: // Hash, equality and allocator are copied instead of moved because
1463
1465
// `that` must be left valid. If Hash is std::function<Key>, moving it
1464
1466
// would create a nullptr functor that cannot be called.
1465
- CommonFields(std::move(that)),
1466
- settings_(absl::exchange(that.growth_left(), size_t{0 }),
1467
+ settings_(absl::exchange(that.common(), CommonFields{}),
1467
1468
that.hash_ref(), that.eq_ref(), that.alloc_ref()) {}
1468
1469
1469
1470
raw_hash_set (raw_hash_set&& that, const allocator_type& a)
1470
- : settings_(0 , that.hash_ref(), that.eq_ref(), a) {
1471
+ : settings_(CommonFields{} , that.hash_ref(), that.eq_ref(), a) {
1471
1472
if (a == that.alloc_ref ()) {
1472
1473
std::swap (common (), that.common ());
1473
- std::swap (growth_left (), that.growth_left ());
1474
1474
} else {
1475
1475
reserve (that.size ());
1476
1476
// Note: this will copy elements of dense_set and unordered_set instead of
@@ -1545,7 +1545,7 @@ class raw_hash_set : private CommonFields {
1545
1545
// Already guaranteed to be empty; so nothing to do.
1546
1546
} else {
1547
1547
destroy_slots ();
1548
- ClearBackingArray (common (), growth_left (), GetPolicyFunctions (),
1548
+ ClearBackingArray (common (), GetPolicyFunctions (),
1549
1549
/* reuse=*/ cap < 128 );
1550
1550
}
1551
1551
}
@@ -1843,7 +1843,6 @@ class raw_hash_set : private CommonFields {
1843
1843
typename AllocTraits::propagate_on_container_swap{})) {
1844
1844
using std::swap;
1845
1845
swap (common (), that.common ());
1846
- swap (growth_left (), that.growth_left ());
1847
1846
swap (hash_ref (), that.hash_ref ());
1848
1847
swap (eq_ref (), that.eq_ref ());
1849
1848
SwapAlloc (alloc_ref (), that.alloc_ref (),
@@ -1853,7 +1852,7 @@ class raw_hash_set : private CommonFields {
1853
1852
void rehash (size_t n) {
1854
1853
if (n == 0 && capacity () == 0 ) return ;
1855
1854
if (n == 0 && size () == 0 ) {
1856
- ClearBackingArray (common (), growth_left (), GetPolicyFunctions (),
1855
+ ClearBackingArray (common (), GetPolicyFunctions (),
1857
1856
/* reuse=*/ false );
1858
1857
return ;
1859
1858
}
@@ -2079,7 +2078,7 @@ class raw_hash_set : private CommonFields {
2079
2078
// This merely updates the pertinent control byte. This can be used in
2080
2079
// conjunction with Policy::transfer to move the object to another place.
2081
2080
void erase_meta_only (const_iterator it) {
2082
- EraseMetaOnly (common (), growth_left (), it.inner_ .ctrl_ , sizeof (slot_type));
2081
+ EraseMetaOnly (common (), it.inner_ .ctrl_ , sizeof (slot_type));
2083
2082
}
2084
2083
2085
2084
// Allocates a backing array for `self` and initializes its control bytes.
@@ -2094,7 +2093,7 @@ class raw_hash_set : private CommonFields {
2094
2093
using CharAlloc =
2095
2094
typename absl::allocator_traits<Alloc>::template rebind_alloc<char >;
2096
2095
InitializeSlots<CharAlloc, sizeof (slot_type), alignof (slot_type)>(
2097
- common (), growth_left (), CharAlloc (alloc_ref ()));
2096
+ common (), CharAlloc (alloc_ref ()));
2098
2097
}
2099
2098
2100
2099
ABSL_ATTRIBUTE_NOINLINE void resize (size_t new_capacity) {
@@ -2134,8 +2133,7 @@ class raw_hash_set : private CommonFields {
2134
2133
inline void drop_deletes_without_resize () {
2135
2134
// Stack-allocate space for swapping elements.
2136
2135
alignas (slot_type) unsigned char tmp[sizeof (slot_type)];
2137
- DropDeletesWithoutResize (common (), growth_left (), GetPolicyFunctions (),
2138
- tmp);
2136
+ DropDeletesWithoutResize (common (), GetPolicyFunctions (), tmp);
2139
2137
}
2140
2138
2141
2139
// Called whenever the table *might* need to conditionally grow.
@@ -2305,15 +2303,15 @@ class raw_hash_set : private CommonFields {
2305
2303
// side-effect.
2306
2304
//
2307
2305
// See `CapacityToGrowth()`.
2308
- size_t & growth_left () { return settings_. template get < 0 >() ; }
2306
+ size_t & growth_left () { return common (). growth_left_ ; }
2309
2307
2310
2308
// Prefetch the heap-allocated memory region to resolve potential TLB misses.
2311
2309
// This is intended to overlap with execution of calculating the hash for a
2312
2310
// key.
2313
2311
void prefetch_heap_block () const { base_internal::PrefetchT2 (control ()); }
2314
2312
2315
- CommonFields& common () { return * this ; }
2316
- const CommonFields& common () const { return * this ; }
2313
+ CommonFields& common () { return settings_. template get < 0 >() ; }
2314
+ const CommonFields& common () const { return settings_. template get < 0 >() ; }
2317
2315
2318
2316
ctrl_t * control () const { return common ().control_ ; }
2319
2317
slot_type* slot_array () const {
@@ -2369,13 +2367,12 @@ class raw_hash_set : private CommonFields {
2369
2367
return value;
2370
2368
}
2371
2369
2372
- // Bundle together growth_left (number of slots that can be filled without
2373
- // rehashing) plus other objects which might be empty. CompressedTuple will
2374
- // ensure that sizeof is not affected by any of the empty fields that occur
2375
- // after growth_left.
2376
- absl::container_internal::CompressedTuple<size_t /* growth_left */ , hasher,
2377
- key_equal, allocator_type>
2378
- settings_{0u , hasher{}, key_equal{}, allocator_type{}};
2370
+ // Bundle together CommonFields plus other objects which might be empty.
2371
+ // CompressedTuple will ensure that sizeof is not affected by any of the empty
2372
+ // fields that occur after CommonFields.
2373
+ absl::container_internal::CompressedTuple<CommonFields, hasher, key_equal,
2374
+ allocator_type>
2375
+ settings_{CommonFields{}, hasher{}, key_equal{}, allocator_type{}};
2379
2376
};
2380
2377
2381
2378
// Erases all elements that satisfy the predicate `pred` from the container `c`.
0 commit comments