@@ -2012,8 +2012,8 @@ struct PolicyFunctions {
   // TODO(b/382423690): consider having separate `transfer` and `transfer_n`.
   void (*transfer)(void* set, void* dst_slot, void* src_slot, size_t count);
 
-  // Returns the pointer to the allocator stored in the set.
-  void* (*alloc_fn)(CommonFields& common);
+  // Returns the pointer to the CharAlloc stored in the set.
+  void* (*get_char_alloc)(CommonFields& common);
 
   // Allocates n bytes for the backing store for common.
   void* (*alloc)(void* alloc, size_t n);
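The renamed `get_char_alloc` member keeps the table's type-erased policy layout: each concrete table exposes a function that hands back a `void*` to the allocator it stores, and the shared growth code feeds that pointer to `alloc`. A minimal sketch of this pattern follows; `Common`, `Policy`, and the function names are illustrative stand-ins, not the real `CommonFields`/`PolicyFunctions` definitions.

```cpp
#include <cstddef>
#include <memory>

// Illustrative stand-ins for CommonFields / PolicyFunctions (not the real ones).
struct Common {
  void* char_alloc;  // points at the CharAlloc stored in the concrete set
};

struct Policy {
  // Returns a type-erased pointer to the CharAlloc stored in the set.
  void* (*get_char_alloc)(Common& common);
  // Allocates n bytes for the backing store using that allocator.
  void* (*alloc)(void* alloc, std::size_t n);
};

// One possible instantiation for std::allocator<char>.
void* GetCharAlloc(Common& c) { return c.char_alloc; }
void* AllocBytes(void* alloc, std::size_t n) {
  auto* a = static_cast<std::allocator<char>*>(alloc);
  return std::allocator_traits<std::allocator<char>>::allocate(*a, n);
}

// Type-erased growth code then only needs something like:
//   void* a = policy.get_char_alloc(common);
//   char* mem = static_cast<char*>(policy.alloc(a, layout_size));
```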
@@ -2159,7 +2159,7 @@ void GrowFullSooTableToNextCapacity(CommonFields& common, size_t soo_slot_hash,
   // The decision to sample was already made during the first insertion.
   RawHashSetLayout layout(kNewCapacity, slot_size, slot_align,
                           /*has_infoz=*/false);
-  void* alloc = policy.alloc_fn(common);
+  void* alloc = policy.get_char_alloc(common);
   char* mem = static_cast<char*>(policy.alloc(alloc, layout.alloc_size()));
   const GenerationType old_generation = common.generation();
   common.set_generation_ptr(
@@ -2675,7 +2675,7 @@ class raw_hash_set {
 
   raw_hash_set(const raw_hash_set& that)
       : raw_hash_set(that, AllocTraits::select_on_container_copy_construction(
-                              that.alloc_ref())) {}
+                              allocator_type(that.char_alloc_ref()))) {}
 
   raw_hash_set(const raw_hash_set& that, const allocator_type& a)
       : raw_hash_set(GrowthToLowerboundCapacity(that.size()), that.hash_ref(),
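This copy constructor shows the overall direction of the change: the table stores only the `char`-rebound allocator (`CharAlloc`) and rebuilds `allocator_type` from it whenever the element-typed allocator is actually required. A rough sketch of that round trip for a standard-conforming allocator; the `TableLike` type is made up for illustration:

```cpp
#include <memory>
#include <type_traits>

template <class T, class Alloc = std::allocator<T>>
struct TableLike {
  using AllocTraits = std::allocator_traits<Alloc>;
  // The table stores only the char-rebound allocator...
  using CharAlloc = typename AllocTraits::template rebind_alloc<char>;
  CharAlloc char_alloc_;

  // ...and rebuilds the user-facing allocator on demand, e.g. for copies.
  Alloc copy_alloc() const {
    return AllocTraits::select_on_container_copy_construction(
        Alloc(char_alloc_));
  }
};

static_assert(
    std::is_same_v<TableLike<int>::CharAlloc, std::allocator<char>>);
```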
@@ -2758,7 +2758,7 @@ class raw_hash_set {
         settings_(PolicyTraits::transfer_uses_memcpy() || !that.is_full_soo()
                       ? std::move(that.common())
                       : CommonFields{full_soo_tag_t{}},
-                  that.hash_ref(), that.eq_ref(), that.alloc_ref()) {
+                  that.hash_ref(), that.eq_ref(), that.char_alloc_ref()) {
     if (!PolicyTraits::transfer_uses_memcpy() && that.is_full_soo()) {
       transfer(soo_slot(), that.soo_slot());
     }
@@ -2769,7 +2769,7 @@ class raw_hash_set {
   raw_hash_set(raw_hash_set&& that, const allocator_type& a)
       : settings_(CommonFields::CreateDefault<SooEnabled()>(), that.hash_ref(),
                   that.eq_ref(), a) {
-    if (a == that.alloc_ref()) {
+    if (CharAlloc(a) == that.char_alloc_ref()) {
       swap_common(that);
       annotate_for_bug_detection_on_move(that);
     } else {
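Comparing `CharAlloc(a)` against the stored char allocator relies on the allocator requirements: rebinding preserves equality, so two allocators compare equal exactly when their `char`-rebound copies do. A small illustration with `std::pmr::polymorphic_allocator`, whose equality is determined by the underlying memory resource (this is just an example allocator, not one used by the table):

```cpp
#include <memory_resource>

// Rebinding a polymorphic_allocator preserves its memory resource, so the
// char-rebound copies compare equal exactly when the originals do.
int main() {
  std::pmr::monotonic_buffer_resource r1, r2;
  std::pmr::polymorphic_allocator<int> a1(&r1), a2(&r2), a3(&r1);
  std::pmr::polymorphic_allocator<char> c1(a1), c2(a2), c3(a3);
  // a1 == a3 and c1 == c3; a1 != a2 and c1 != c2.
  return (c1 == c3 && !(c1 == c2)) ? 0 : 1;
}
```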
@@ -2786,7 +2786,9 @@ class raw_hash_set {
     // is an exact match for that.size(). If this->capacity() is too big, then
     // it would make iteration very slow to reuse the allocation. Maybe we can
     // do the same heuristic as clear() and reuse if it's small enough.
-    raw_hash_set tmp(that, propagate_alloc ? that.alloc_ref() : alloc_ref());
+    allocator_type alloc(propagate_alloc ? that.char_alloc_ref()
+                                         : char_alloc_ref());
+    raw_hash_set tmp(that, alloc);
     // NOLINTNEXTLINE: not returning *this for performance.
     return assign_impl<propagate_alloc>(std::move(tmp));
   }
@@ -3112,7 +3114,8 @@ class raw_hash_set {
     auto res = find_or_prepare_insert(key);
     if (res.second) {
       slot_type* slot = res.first.slot();
-      std::forward<F>(f)(constructor(&alloc_ref(), &slot));
+      allocator_type alloc(char_alloc_ref());
+      std::forward<F>(f)(constructor(&alloc, &slot));
       ABSL_SWISSTABLE_ASSERT(!slot);
     }
     return res.first;
@@ -3216,7 +3219,8 @@ class raw_hash_set {
     AssertNotDebugCapacity();
     AssertIsFull(position.control(), position.inner_.generation(),
                  position.inner_.generation_ptr(), "extract()");
-    auto node = CommonAccess::Transfer<node_type>(alloc_ref(), position.slot());
+    allocator_type alloc(char_alloc_ref());
+    auto node = CommonAccess::Transfer<node_type>(alloc, position.slot());
     if (is_soo()) {
       common().set_empty_soo();
     } else {
@@ -3242,7 +3246,7 @@ class raw_hash_set {
     swap_common(that);
     swap(hash_ref(), that.hash_ref());
     swap(eq_ref(), that.eq_ref());
-    SwapAlloc(alloc_ref(), that.alloc_ref(),
+    SwapAlloc(char_alloc_ref(), that.char_alloc_ref(),
               typename AllocTraits::propagate_on_container_swap{});
   }
 
@@ -3364,7 +3368,9 @@ class raw_hash_set {
 
   hasher hash_function() const { return hash_ref(); }
   key_equal key_eq() const { return eq_ref(); }
-  allocator_type get_allocator() const { return alloc_ref(); }
+  allocator_type get_allocator() const {
+    return allocator_type(char_alloc_ref());
+  }
 
   friend bool operator==(const raw_hash_set& a, const raw_hash_set& b) {
     if (a.size() != b.size()) return false;
@@ -3431,7 +3437,7 @@ class raw_hash_set {
   struct EqualElement {
     template <class K2, class... Args>
     bool operator()(const K2& lhs, Args&&...) const {
-      return eq(lhs, rhs);
+      ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(eq(lhs, rhs));
     }
     const K1& rhs;
     const key_equal& eq;
@@ -3469,16 +3475,21 @@ class raw_hash_set {
   template <typename... Args>
   inline void construct(slot_type* slot, Args&&... args) {
     common().RunWithReentrancyGuard([&] {
-      PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
+      allocator_type alloc(char_alloc_ref());
+      PolicyTraits::construct(&alloc, slot, std::forward<Args>(args)...);
     });
   }
   inline void destroy(slot_type* slot) {
-    common().RunWithReentrancyGuard(
-        [&] { PolicyTraits::destroy(&alloc_ref(), slot); });
+    common().RunWithReentrancyGuard([&] {
+      allocator_type alloc(char_alloc_ref());
+      PolicyTraits::destroy(&alloc, slot);
+    });
   }
   inline void transfer(slot_type* to, slot_type* from) {
-    common().RunWithReentrancyGuard(
-        [&] { PolicyTraits::transfer(&alloc_ref(), to, from); });
+    common().RunWithReentrancyGuard([&] {
+      allocator_type alloc(char_alloc_ref());
+      PolicyTraits::transfer(&alloc, to, from);
+    });
   }
 
   // TODO(b/289225379): consider having a helper class that has the impls for
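With only `CharAlloc` stored, `construct`, `destroy`, and `transfer` each materialize a temporary `allocator_type` from it for the duration of the call; for stateless allocators this is free, and for stateful ones it is a cheap copy. A sketch of the idea in `std::allocator_traits` terms (the helper name is hypothetical):

```cpp
#include <memory>
#include <utility>

// Hypothetical helper: rebuild the element allocator from the stored char
// allocator just long enough to construct into a slot.
template <class T, class CharAlloc, class... Args>
void ConstructAt(CharAlloc& char_alloc, T* slot, Args&&... args) {
  using Alloc =
      typename std::allocator_traits<CharAlloc>::template rebind_alloc<T>;
  Alloc alloc(char_alloc);  // temporary element-typed allocator
  std::allocator_traits<Alloc>::construct(alloc, slot,
                                          std::forward<Args>(args)...);
}
```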
@@ -3522,8 +3533,7 @@ class raw_hash_set {
 
   void clear_backing_array(bool reuse) {
     ABSL_SWISSTABLE_ASSERT(capacity() > DefaultCapacity());
-    CharAlloc alloc(alloc_ref());
-    ClearBackingArray(common(), GetPolicyFunctions(), &alloc, reuse,
+    ClearBackingArray(common(), GetPolicyFunctions(), &char_alloc_ref(), reuse,
                       SooEnabled());
   }
 
@@ -3541,9 +3551,8 @@ class raw_hash_set {
     // Unpoison before returning the memory to the allocator.
     SanitizerUnpoisonMemoryRegion(slot_array(), sizeof(slot_type) * capacity());
     infoz().Unregister();
-    CharAlloc alloc(alloc_ref());
     DeallocateBackingArray<BackingArrayAlignment(alignof(slot_type)),
-                           CharAlloc>(&alloc, capacity(), control(),
+                           CharAlloc>(&char_alloc_ref(), capacity(), control(),
                                       sizeof(slot_type), alignof(slot_type),
                                       common().has_infoz());
   }
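Since the stored allocator is already a `CharAlloc`, the deallocation path can pass `&char_alloc_ref()` straight to `DeallocateBackingArray` instead of first copying `alloc_ref()` into a local `CharAlloc`. The byte-oriented allocate/deallocate pair looks roughly like this; a sketch assuming a plain-pointer `CharAlloc` such as `std::allocator<char>`, not the real `AllocateBackingArray`/`DeallocateBackingArray`:

```cpp
#include <cstddef>
#include <memory>

// The backing store is a raw byte buffer managed directly through the
// char-rebound allocator, so no element-typed allocator is needed here.
template <class CharAlloc>
char* AllocateBytes(CharAlloc* alloc, std::size_t n) {
  return std::allocator_traits<CharAlloc>::allocate(*alloc, n);
}

template <class CharAlloc>
void DeallocateBytes(CharAlloc* alloc, char* p, std::size_t n) {
  std::allocator_traits<CharAlloc>::deallocate(*alloc, p, n);
}
```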
@@ -3598,7 +3607,7 @@ class raw_hash_set {
   static slot_type* to_slot(void* buf) { return static_cast<slot_type*>(buf); }
 
   // Requires that lhs does not have a full SOO slot.
-  static void move_common(bool rhs_is_full_soo, allocator_type& rhs_alloc,
+  static void move_common(bool rhs_is_full_soo, CharAlloc& rhs_alloc,
                           CommonFields& lhs, CommonFields&& rhs) {
     if (PolicyTraits::transfer_uses_memcpy() || !rhs_is_full_soo) {
       lhs = std::move(rhs);
@@ -3623,10 +3632,12 @@ class raw_hash_set {
     }
     CommonFields tmp = CommonFields(uninitialized_tag_t{});
     const bool that_is_full_soo = that.is_full_soo();
-    move_common(that_is_full_soo, that.alloc_ref(), tmp,
+    move_common(that_is_full_soo, that.char_alloc_ref(), tmp,
                 std::move(that.common()));
-    move_common(is_full_soo(), alloc_ref(), that.common(), std::move(common()));
-    move_common(that_is_full_soo, that.alloc_ref(), common(), std::move(tmp));
+    move_common(is_full_soo(), char_alloc_ref(), that.common(),
+                std::move(common()));
+    move_common(that_is_full_soo, that.char_alloc_ref(), common(),
+                std::move(tmp));
   }
 
   void annotate_for_bug_detection_on_move(
@@ -3653,11 +3664,11 @@ class raw_hash_set {
     // We don't bother checking for this/that aliasing. We just need to avoid
     // breaking the invariants in that case.
     destructor_impl();
-    move_common(that.is_full_soo(), that.alloc_ref(), common(),
+    move_common(that.is_full_soo(), that.char_alloc_ref(), common(),
                 std::move(that.common()));
     hash_ref() = that.hash_ref();
     eq_ref() = that.eq_ref();
-    CopyAlloc(alloc_ref(), that.alloc_ref(),
+    CopyAlloc(char_alloc_ref(), that.char_alloc_ref(),
               std::integral_constant<bool, propagate_alloc>());
     that.common() = CommonFields::CreateDefault<SooEnabled()>();
     annotate_for_bug_detection_on_move(that);
@@ -3684,7 +3695,7 @@ class raw_hash_set {
   }
   raw_hash_set& move_assign(raw_hash_set&& that,
                             std::false_type /*propagate_alloc*/) {
-    if (alloc_ref() == that.alloc_ref()) {
+    if (char_alloc_ref() == that.char_alloc_ref()) {
       return assign_impl<false>(std::move(that));
     }
     // Aliasing can't happen here because allocs would compare equal above.
@@ -3913,10 +3924,12 @@ class raw_hash_set {
   }
   slot_type* soo_slot() {
     ABSL_SWISSTABLE_ASSERT(is_soo());
-    return static_cast<slot_type*>(common().soo_data());
+    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(
+        static_cast<slot_type*>(common().soo_data()));
   }
   const slot_type* soo_slot() const {
-    return const_cast<raw_hash_set*>(this)->soo_slot();
+    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(
+        const_cast<raw_hash_set*>(this)->soo_slot());
   }
   iterator soo_iterator() {
     return {SooControl(), soo_slot(), common().generation_ptr()};
@@ -3933,14 +3946,14 @@ class raw_hash_set {
   const hasher& hash_ref() const { return settings_.template get<1>(); }
   key_equal& eq_ref() { return settings_.template get<2>(); }
   const key_equal& eq_ref() const { return settings_.template get<2>(); }
-  allocator_type& alloc_ref() { return settings_.template get<3>(); }
-  const allocator_type& alloc_ref() const {
+  CharAlloc& char_alloc_ref() { return settings_.template get<3>(); }
+  const CharAlloc& char_alloc_ref() const {
     return settings_.template get<3>();
   }
 
-  static void* get_alloc_ref_fn(CommonFields& common) {
+  static void* get_char_alloc_ref_fn(CommonFields& common) {
     auto* h = reinterpret_cast<raw_hash_set*>(&common);
-    return &h->alloc_ref();
+    return &h->char_alloc_ref();
   }
   static void* get_hash_ref_fn(CommonFields& common) {
     auto* h = reinterpret_cast<raw_hash_set*>(&common);
@@ -3989,11 +4002,6 @@ class raw_hash_set {
     static_assert(sizeof(value_type) <= (std::numeric_limits<uint32_t>::max)());
     static constexpr size_t kBackingArrayAlignment =
         BackingArrayAlignment(alignof(slot_type));
-    // TODO(b/397461659): store CharAlloc in the table instead of Alloc.
-    // If both allocators are empty, we can use the same pointer for both
-    // allocators.
-    static constexpr bool kAllocAndCharAllocPointersCompatible =
-        std::is_empty_v<CharAlloc> && std::is_empty_v<Alloc>;
     static constexpr PolicyFunctions value = {
         sizeof(key_type), sizeof(value_type), sizeof(slot_type),
         alignof(slot_type), SooEnabled() ? SooCapacity() : 0,
@@ -4007,13 +4015,9 @@ class raw_hash_set {
             ? TransferRelocatable<sizeof(slot_type)>
             : &raw_hash_set::transfer_slots_fn,
         std::is_empty_v<Alloc> ? &GetRefForEmptyClass
-                               : &raw_hash_set::get_alloc_ref_fn,
-        kAllocAndCharAllocPointersCompatible
-            ? &AllocateBackingArray<kBackingArrayAlignment, CharAlloc>
-            : &AllocateBackingArray<kBackingArrayAlignment, Alloc>,
-        kAllocAndCharAllocPointersCompatible
-            ? &DeallocateBackingArray<kBackingArrayAlignment, CharAlloc>
-            : &DeallocateBackingArray<kBackingArrayAlignment, Alloc>,
+                               : &raw_hash_set::get_char_alloc_ref_fn,
+        &AllocateBackingArray<kBackingArrayAlignment, CharAlloc>,
+        &DeallocateBackingArray<kBackingArrayAlignment, CharAlloc>,
         &raw_hash_set::find_new_positions_and_transfer_slots_fn};
     return value;
   }
@@ -4022,9 +4026,9 @@ class raw_hash_set {
   // CompressedTuple will ensure that sizeof is not affected by any of the empty
   // fields that occur after CommonFields.
   absl::container_internal::CompressedTuple<CommonFields, hasher, key_equal,
-                                            allocator_type>
+                                            CharAlloc>
       settings_{CommonFields::CreateDefault<SooEnabled()>(), hasher{},
-                key_equal{}, allocator_type{}};
+                key_equal{}, CharAlloc{}};
 };
 
 // Friend access for free functions in raw_hash_set.h.
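Swapping the stored `allocator_type` for `CharAlloc` in the `CompressedTuple` does not change the table's footprint in the common case: both are empty classes for stateless allocators, and the tuple's empty-member optimization keeps them out of `sizeof`. A rough analogue using `[[no_unique_address]]` instead of `CompressedTuple`; the type names are illustrative:

```cpp
#include <cstddef>
#include <memory>

struct Fields {  // stand-in for CommonFields
  void* ctrl;
  std::size_t size;
};

template <class CharAlloc>
struct Settings {
  Fields fields;
  [[no_unique_address]] CharAlloc alloc;  // empty for std::allocator<char>
};

// On implementations that honor [[no_unique_address]],
// sizeof(Settings<std::allocator<char>>) == sizeof(Fields).
```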