Commit 523b8699 by Evan Brown Committed by Copybara-Service

Change CommonFields from a private base class of raw_hash_set to be the first…

Change CommonFields from a private base class of raw_hash_set to be the first member of the settings_ CompressedTuple so that we can move growth_left into CommonFields.

This allows for removing growth_left as a separate argument for a few functions.

Also, move the infoz() accessor functions to be before the data members of CommonFields to comply with the style guide.

PiperOrigin-RevId: 493918310
Change-Id: I58474e37d3b16a1513d2931af6b153dea1d809c2
parent 2e177685
...@@ -90,7 +90,7 @@ static inline void* PrevSlot(void* slot, size_t slot_size) { ...@@ -90,7 +90,7 @@ static inline void* PrevSlot(void* slot, size_t slot_size) {
return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(slot) - slot_size); return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(slot) - slot_size);
} }
void DropDeletesWithoutResize(CommonFields& common, size_t& growth_left, void DropDeletesWithoutResize(CommonFields& common,
const PolicyFunctions& policy, void* tmp_space) { const PolicyFunctions& policy, void* tmp_space) {
void* set = &common; void* set = &common;
void* slot_array = common.slots_; void* slot_array = common.slots_;
...@@ -167,12 +167,11 @@ void DropDeletesWithoutResize(CommonFields& common, size_t& growth_left, ...@@ -167,12 +167,11 @@ void DropDeletesWithoutResize(CommonFields& common, size_t& growth_left,
slot_ptr = PrevSlot(slot_ptr, slot_size); slot_ptr = PrevSlot(slot_ptr, slot_size);
} }
} }
ResetGrowthLeft(common, growth_left); ResetGrowthLeft(common);
common.infoz().RecordRehash(total_probe_length); common.infoz().RecordRehash(total_probe_length);
} }
void EraseMetaOnly(CommonFields& c, size_t& growth_left, ctrl_t* it, void EraseMetaOnly(CommonFields& c, ctrl_t* it, size_t slot_size) {
size_t slot_size) {
assert(IsFull(*it) && "erasing a dangling iterator"); assert(IsFull(*it) && "erasing a dangling iterator");
--c.size_; --c.size_;
const auto index = static_cast<size_t>(it - c.control_); const auto index = static_cast<size_t>(it - c.control_);
...@@ -190,15 +189,15 @@ void EraseMetaOnly(CommonFields& c, size_t& growth_left, ctrl_t* it, ...@@ -190,15 +189,15 @@ void EraseMetaOnly(CommonFields& c, size_t& growth_left, ctrl_t* it,
SetCtrl(c, index, was_never_full ? ctrl_t::kEmpty : ctrl_t::kDeleted, SetCtrl(c, index, was_never_full ? ctrl_t::kEmpty : ctrl_t::kDeleted,
slot_size); slot_size);
growth_left += (was_never_full ? 1 : 0); c.growth_left_ += (was_never_full ? 1 : 0);
c.infoz().RecordErase(); c.infoz().RecordErase();
} }
void ClearBackingArray(CommonFields& c, size_t& growth_left, void ClearBackingArray(CommonFields& c, const PolicyFunctions& policy,
const PolicyFunctions& policy, bool reuse) { bool reuse) {
if (reuse) { if (reuse) {
c.size_ = 0; c.size_ = 0;
ResetCtrl(c, growth_left, policy.slot_size); ResetCtrl(c, policy.slot_size);
c.infoz().RecordStorageChanged(0, c.capacity_); c.infoz().RecordStorageChanged(0, c.capacity_);
} else { } else {
void* set = &c; void* set = &c;
...@@ -207,7 +206,7 @@ void ClearBackingArray(CommonFields& c, size_t& growth_left, ...@@ -207,7 +206,7 @@ void ClearBackingArray(CommonFields& c, size_t& growth_left,
c.slots_ = nullptr; c.slots_ = nullptr;
c.size_ = 0; c.size_ = 0;
c.capacity_ = 0; c.capacity_ = 0;
growth_left = 0; c.growth_left_ = 0;
c.infoz().RecordClearedReservation(); c.infoz().RecordClearedReservation();
assert(c.size_ == 0); assert(c.size_ == 0);
c.infoz().RecordStorageChanged(0, 0); c.infoz().RecordStorageChanged(0, 0);
......
...@@ -740,14 +740,19 @@ class CommonFields : public HashtablezInfoHandle { ...@@ -740,14 +740,19 @@ class CommonFields : public HashtablezInfoHandle {
control_(that.control_), control_(that.control_),
slots_(that.slots_), slots_(that.slots_),
size_(that.size_), size_(that.size_),
capacity_(that.capacity_) { capacity_(that.capacity_),
growth_left_(that.growth_left_) {
that.control_ = EmptyGroup(); that.control_ = EmptyGroup();
that.slots_ = nullptr; that.slots_ = nullptr;
that.size_ = 0; that.size_ = 0;
that.capacity_ = 0; that.capacity_ = 0;
that.growth_left_ = 0;
} }
CommonFields& operator=(CommonFields&&) = default; CommonFields& operator=(CommonFields&&) = default;
HashtablezInfoHandle& infoz() { return *this; }
const HashtablezInfoHandle& infoz() const { return *this; }
// TODO(b/259599413): Investigate removing some of these fields: // TODO(b/259599413): Investigate removing some of these fields:
// - control/slots can be derived from each other // - control/slots can be derived from each other
// - size can be moved into the slot array // - size can be moved into the slot array
...@@ -768,8 +773,8 @@ class CommonFields : public HashtablezInfoHandle { ...@@ -768,8 +773,8 @@ class CommonFields : public HashtablezInfoHandle {
// The total number of available slots. // The total number of available slots.
size_t capacity_ = 0; size_t capacity_ = 0;
HashtablezInfoHandle& infoz() { return *this; } // The number of slots we can still fill without needing to rehash.
const HashtablezInfoHandle& infoz() const { return *this; } size_t growth_left_ = 0;
}; };
// Returns the number of "cloned control bytes". // Returns the number of "cloned control bytes".
...@@ -968,21 +973,20 @@ extern template FindInfo find_first_non_full(const CommonFields&, size_t); ...@@ -968,21 +973,20 @@ extern template FindInfo find_first_non_full(const CommonFields&, size_t);
// performance critical routines. // performance critical routines.
FindInfo find_first_non_full_outofline(const CommonFields&, size_t); FindInfo find_first_non_full_outofline(const CommonFields&, size_t);
inline void ResetGrowthLeft(CommonFields& common, size_t& growth_left) { inline void ResetGrowthLeft(CommonFields& common) {
growth_left = CapacityToGrowth(common.capacity_) - common.size_; common.growth_left_ = CapacityToGrowth(common.capacity_) - common.size_;
} }
// Sets `ctrl` to `{kEmpty, kSentinel, ..., kEmpty}`, marking the entire // Sets `ctrl` to `{kEmpty, kSentinel, ..., kEmpty}`, marking the entire
// array as empty. // array as empty.
inline void ResetCtrl(CommonFields& common, size_t& growth_left, inline void ResetCtrl(CommonFields& common, size_t slot_size) {
size_t slot_size) {
const size_t capacity = common.capacity_; const size_t capacity = common.capacity_;
ctrl_t* ctrl = common.control_; ctrl_t* ctrl = common.control_;
std::memset(ctrl, static_cast<int8_t>(ctrl_t::kEmpty), std::memset(ctrl, static_cast<int8_t>(ctrl_t::kEmpty),
capacity + 1 + NumClonedBytes()); capacity + 1 + NumClonedBytes());
ctrl[capacity] = ctrl_t::kSentinel; ctrl[capacity] = ctrl_t::kSentinel;
SanitizerPoisonMemoryRegion(common.slots_, slot_size * capacity); SanitizerPoisonMemoryRegion(common.slots_, slot_size * capacity);
ResetGrowthLeft(common, growth_left); ResetGrowthLeft(common);
} }
// Sets `ctrl[i]` to `h`. // Sets `ctrl[i]` to `h`.
...@@ -1027,8 +1031,7 @@ inline size_t AllocSize(size_t capacity, size_t slot_size, size_t slot_align) { ...@@ -1027,8 +1031,7 @@ inline size_t AllocSize(size_t capacity, size_t slot_size, size_t slot_align) {
} }
template <typename Alloc, size_t SizeOfSlot, size_t AlignOfSlot> template <typename Alloc, size_t SizeOfSlot, size_t AlignOfSlot>
ABSL_ATTRIBUTE_NOINLINE void InitializeSlots(CommonFields& c, ABSL_ATTRIBUTE_NOINLINE void InitializeSlots(CommonFields& c, Alloc alloc) {
size_t& growth_left, Alloc alloc) {
assert(c.capacity_); assert(c.capacity_);
// Folks with custom allocators often make unwarranted assumptions about the // Folks with custom allocators often make unwarranted assumptions about the
// behavior of their classes vis-a-vis trivial destructability and what // behavior of their classes vis-a-vis trivial destructability and what
...@@ -1045,7 +1048,7 @@ ABSL_ATTRIBUTE_NOINLINE void InitializeSlots(CommonFields& c, ...@@ -1045,7 +1048,7 @@ ABSL_ATTRIBUTE_NOINLINE void InitializeSlots(CommonFields& c,
Allocate<AlignOfSlot>(&alloc, AllocSize(cap, SizeOfSlot, AlignOfSlot))); Allocate<AlignOfSlot>(&alloc, AllocSize(cap, SizeOfSlot, AlignOfSlot)));
c.control_ = reinterpret_cast<ctrl_t*>(mem); c.control_ = reinterpret_cast<ctrl_t*>(mem);
c.slots_ = mem + SlotOffset(cap, AlignOfSlot); c.slots_ = mem + SlotOffset(cap, AlignOfSlot);
ResetCtrl(c, growth_left, SizeOfSlot); ResetCtrl(c, SizeOfSlot);
if (sample_size) { if (sample_size) {
c.infoz() = Sample(sample_size); c.infoz() = Sample(sample_size);
} }
...@@ -1073,12 +1076,11 @@ struct PolicyFunctions { ...@@ -1073,12 +1076,11 @@ struct PolicyFunctions {
// ClearBackingArray clears the backing array, either modifying it in place, // ClearBackingArray clears the backing array, either modifying it in place,
// or creating a new one based on the value of "reuse". // or creating a new one based on the value of "reuse".
// REQUIRES: c.capacity > 0 // REQUIRES: c.capacity > 0
void ClearBackingArray(CommonFields& c, size_t& growth_left, void ClearBackingArray(CommonFields& c, const PolicyFunctions& policy,
const PolicyFunctions& policy, bool reuse); bool reuse);
// Type-erased version of raw_hash_set::erase_meta_only. // Type-erased version of raw_hash_set::erase_meta_only.
void EraseMetaOnly(CommonFields& c, size_t& growth_left, ctrl_t* it, void EraseMetaOnly(CommonFields& c, ctrl_t* it, size_t slot_size);
size_t slot_size);
// Function to place in PolicyFunctions::dealloc for raw_hash_sets // Function to place in PolicyFunctions::dealloc for raw_hash_sets
// that are using std::allocator. This allows us to share the same // that are using std::allocator. This allows us to share the same
...@@ -1106,7 +1108,7 @@ ABSL_ATTRIBUTE_NOINLINE void TransferRelocatable(void*, void* dst, void* src) { ...@@ -1106,7 +1108,7 @@ ABSL_ATTRIBUTE_NOINLINE void TransferRelocatable(void*, void* dst, void* src) {
} }
// Type-erased version of raw_hash_set::drop_deletes_without_resize. // Type-erased version of raw_hash_set::drop_deletes_without_resize.
void DropDeletesWithoutResize(CommonFields& common, size_t& growth_left, void DropDeletesWithoutResize(CommonFields& common,
const PolicyFunctions& policy, void* tmp_space); const PolicyFunctions& policy, void* tmp_space);
// A SwissTable. // A SwissTable.
...@@ -1130,7 +1132,7 @@ void DropDeletesWithoutResize(CommonFields& common, size_t& growth_left, ...@@ -1130,7 +1132,7 @@ void DropDeletesWithoutResize(CommonFields& common, size_t& growth_left,
// the storage of the hashtable will be allocated and the elements will be // the storage of the hashtable will be allocated and the elements will be
// constructed and destroyed. // constructed and destroyed.
template <class Policy, class Hash, class Eq, class Alloc> template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_set : private CommonFields { class raw_hash_set {
using PolicyTraits = hash_policy_traits<Policy>; using PolicyTraits = hash_policy_traits<Policy>;
using KeyArgImpl = using KeyArgImpl =
KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>; KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;
...@@ -1337,7 +1339,7 @@ class raw_hash_set : private CommonFields { ...@@ -1337,7 +1339,7 @@ class raw_hash_set : private CommonFields {
size_t bucket_count, const hasher& hash = hasher(), size_t bucket_count, const hasher& hash = hasher(),
const key_equal& eq = key_equal(), const key_equal& eq = key_equal(),
const allocator_type& alloc = allocator_type()) const allocator_type& alloc = allocator_type())
: settings_(0u, hash, eq, alloc) { : settings_(CommonFields{}, hash, eq, alloc) {
if (bucket_count) { if (bucket_count) {
common().capacity_ = NormalizeCapacity(bucket_count); common().capacity_ = NormalizeCapacity(bucket_count);
initialize_slots(); initialize_slots();
...@@ -1462,15 +1464,13 @@ class raw_hash_set : private CommonFields { ...@@ -1462,15 +1464,13 @@ class raw_hash_set : private CommonFields {
: // Hash, equality and allocator are copied instead of moved because : // Hash, equality and allocator are copied instead of moved because
// `that` must be left valid. If Hash is std::function<Key>, moving it // `that` must be left valid. If Hash is std::function<Key>, moving it
// would create a nullptr functor that cannot be called. // would create a nullptr functor that cannot be called.
CommonFields(std::move(that)), settings_(absl::exchange(that.common(), CommonFields{}),
settings_(absl::exchange(that.growth_left(), size_t{0}),
that.hash_ref(), that.eq_ref(), that.alloc_ref()) {} that.hash_ref(), that.eq_ref(), that.alloc_ref()) {}
raw_hash_set(raw_hash_set&& that, const allocator_type& a) raw_hash_set(raw_hash_set&& that, const allocator_type& a)
: settings_(0, that.hash_ref(), that.eq_ref(), a) { : settings_(CommonFields{}, that.hash_ref(), that.eq_ref(), a) {
if (a == that.alloc_ref()) { if (a == that.alloc_ref()) {
std::swap(common(), that.common()); std::swap(common(), that.common());
std::swap(growth_left(), that.growth_left());
} else { } else {
reserve(that.size()); reserve(that.size());
// Note: this will copy elements of dense_set and unordered_set instead of // Note: this will copy elements of dense_set and unordered_set instead of
...@@ -1545,7 +1545,7 @@ class raw_hash_set : private CommonFields { ...@@ -1545,7 +1545,7 @@ class raw_hash_set : private CommonFields {
// Already guaranteed to be empty; so nothing to do. // Already guaranteed to be empty; so nothing to do.
} else { } else {
destroy_slots(); destroy_slots();
ClearBackingArray(common(), growth_left(), GetPolicyFunctions(), ClearBackingArray(common(), GetPolicyFunctions(),
/*reuse=*/cap < 128); /*reuse=*/cap < 128);
} }
} }
...@@ -1843,7 +1843,6 @@ class raw_hash_set : private CommonFields { ...@@ -1843,7 +1843,6 @@ class raw_hash_set : private CommonFields {
typename AllocTraits::propagate_on_container_swap{})) { typename AllocTraits::propagate_on_container_swap{})) {
using std::swap; using std::swap;
swap(common(), that.common()); swap(common(), that.common());
swap(growth_left(), that.growth_left());
swap(hash_ref(), that.hash_ref()); swap(hash_ref(), that.hash_ref());
swap(eq_ref(), that.eq_ref()); swap(eq_ref(), that.eq_ref());
SwapAlloc(alloc_ref(), that.alloc_ref(), SwapAlloc(alloc_ref(), that.alloc_ref(),
...@@ -1853,7 +1852,7 @@ class raw_hash_set : private CommonFields { ...@@ -1853,7 +1852,7 @@ class raw_hash_set : private CommonFields {
void rehash(size_t n) { void rehash(size_t n) {
if (n == 0 && capacity() == 0) return; if (n == 0 && capacity() == 0) return;
if (n == 0 && size() == 0) { if (n == 0 && size() == 0) {
ClearBackingArray(common(), growth_left(), GetPolicyFunctions(), ClearBackingArray(common(), GetPolicyFunctions(),
/*reuse=*/false); /*reuse=*/false);
return; return;
} }
...@@ -2079,7 +2078,7 @@ class raw_hash_set : private CommonFields { ...@@ -2079,7 +2078,7 @@ class raw_hash_set : private CommonFields {
// This merely updates the pertinent control byte. This can be used in // This merely updates the pertinent control byte. This can be used in
// conjunction with Policy::transfer to move the object to another place. // conjunction with Policy::transfer to move the object to another place.
void erase_meta_only(const_iterator it) { void erase_meta_only(const_iterator it) {
EraseMetaOnly(common(), growth_left(), it.inner_.ctrl_, sizeof(slot_type)); EraseMetaOnly(common(), it.inner_.ctrl_, sizeof(slot_type));
} }
// Allocates a backing array for `self` and initializes its control bytes. // Allocates a backing array for `self` and initializes its control bytes.
...@@ -2094,7 +2093,7 @@ class raw_hash_set : private CommonFields { ...@@ -2094,7 +2093,7 @@ class raw_hash_set : private CommonFields {
using CharAlloc = using CharAlloc =
typename absl::allocator_traits<Alloc>::template rebind_alloc<char>; typename absl::allocator_traits<Alloc>::template rebind_alloc<char>;
InitializeSlots<CharAlloc, sizeof(slot_type), alignof(slot_type)>( InitializeSlots<CharAlloc, sizeof(slot_type), alignof(slot_type)>(
common(), growth_left(), CharAlloc(alloc_ref())); common(), CharAlloc(alloc_ref()));
} }
ABSL_ATTRIBUTE_NOINLINE void resize(size_t new_capacity) { ABSL_ATTRIBUTE_NOINLINE void resize(size_t new_capacity) {
...@@ -2134,8 +2133,7 @@ class raw_hash_set : private CommonFields { ...@@ -2134,8 +2133,7 @@ class raw_hash_set : private CommonFields {
inline void drop_deletes_without_resize() { inline void drop_deletes_without_resize() {
// Stack-allocate space for swapping elements. // Stack-allocate space for swapping elements.
alignas(slot_type) unsigned char tmp[sizeof(slot_type)]; alignas(slot_type) unsigned char tmp[sizeof(slot_type)];
DropDeletesWithoutResize(common(), growth_left(), GetPolicyFunctions(), DropDeletesWithoutResize(common(), GetPolicyFunctions(), tmp);
tmp);
} }
// Called whenever the table *might* need to conditionally grow. // Called whenever the table *might* need to conditionally grow.
...@@ -2305,15 +2303,15 @@ class raw_hash_set : private CommonFields { ...@@ -2305,15 +2303,15 @@ class raw_hash_set : private CommonFields {
// side-effect. // side-effect.
// //
// See `CapacityToGrowth()`. // See `CapacityToGrowth()`.
size_t& growth_left() { return settings_.template get<0>(); } size_t& growth_left() { return common().growth_left_; }
// Prefetch the heap-allocated memory region to resolve potential TLB misses. // Prefetch the heap-allocated memory region to resolve potential TLB misses.
// This is intended to overlap with execution of calculating the hash for a // This is intended to overlap with execution of calculating the hash for a
// key. // key.
void prefetch_heap_block() const { base_internal::PrefetchT2(control()); } void prefetch_heap_block() const { base_internal::PrefetchT2(control()); }
CommonFields& common() { return *this; } CommonFields& common() { return settings_.template get<0>(); }
const CommonFields& common() const { return *this; } const CommonFields& common() const { return settings_.template get<0>(); }
ctrl_t* control() const { return common().control_; } ctrl_t* control() const { return common().control_; }
slot_type* slot_array() const { slot_type* slot_array() const {
...@@ -2369,13 +2367,12 @@ class raw_hash_set : private CommonFields { ...@@ -2369,13 +2367,12 @@ class raw_hash_set : private CommonFields {
return value; return value;
} }
// Bundle together growth_left (number of slots that can be filled without // Bundle together CommonFields plus other objects which might be empty.
// rehashing) plus other objects which might be empty. CompressedTuple will // CompressedTuple will ensure that sizeof is not affected by any of the empty
// ensure that sizeof is not affected by any of the empty fields that occur // fields that occur after CommonFields.
// after growth_left. absl::container_internal::CompressedTuple<CommonFields, hasher, key_equal,
absl::container_internal::CompressedTuple<size_t /* growth_left */, hasher, allocator_type>
key_equal, allocator_type> settings_{CommonFields{}, hasher{}, key_equal{}, allocator_type{}};
settings_{0u, hasher{}, key_equal{}, allocator_type{}};
}; };
// Erases all elements that satisfy the predicate `pred` from the container `c`. // Erases all elements that satisfy the predicate `pred` from the container `c`.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment