Commit 1d401d9c by Vitaly Goldshteyn Committed by Copybara-Service

Use `IterateOverFullSlots` in `absl::erase_if` for hash table.

`EraseIf` itself is not a very hot function, but I want to use it as a demonstration of the speed of iteration via `IterateOverFullSlots`.

Motivation:
1. We are still going to save some resources.
2. It is the first step to implement faster `absl::c_for_each` that may give larger benefits. Will require readability considerations.

`BM_EraseIf/num_elements:1000/num_erased:0` is just iteration and it gives 60% speed up.
On smaller tables, removing all elements shows a 25% speed up. Note that on small tables erasing is much faster due to cl/592272653.

```
name                                          old cpu/op   new cpu/op   delta
BM_EraseIf/num_elements:10/num_erased:0       3.41ns ± 5%  3.03ns ± 3%  -11.14%  (p=0.000 n=37+35)
BM_EraseIf/num_elements:1000/num_erased:0     6.06ns ± 3%  2.42ns ± 3%  -60.05%  (p=0.000 n=34+37)
BM_EraseIf/num_elements:10/num_erased:5       5.90ns ± 3%  4.44ns ± 4%  -24.88%  (p=0.000 n=36+37)
BM_EraseIf/num_elements:1000/num_erased:500   11.0ns ± 2%   9.2ns ± 2%  -16.60%  (p=0.000 n=35+37)
BM_EraseIf/num_elements:10/num_erased:10      8.03ns ± 3%  5.77ns ± 2%  -28.19%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:1000/num_erased:1000  9.00ns ± 3%  7.83ns ± 2%  -12.98%  (p=0.000 n=37+37)

name                                          old time/op          new time/op          delta
BM_EraseIf/num_elements:10/num_erased:0       3.42ns ± 5%          3.04ns ± 3%  -11.13%  (p=0.000 n=37+36)
BM_EraseIf/num_elements:1000/num_erased:0     6.07ns ± 3%          2.42ns ± 3%  -60.10%  (p=0.000 n=34+37)
BM_EraseIf/num_elements:10/num_erased:5       5.93ns ± 3%          4.45ns ± 4%  -24.89%  (p=0.000 n=36+37)
BM_EraseIf/num_elements:1000/num_erased:500   11.1ns ± 2%           9.2ns ± 2%  -16.61%  (p=0.000 n=35+37)
BM_EraseIf/num_elements:10/num_erased:10      8.06ns ± 3%          5.79ns ± 2%  -28.19%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:1000/num_erased:1000  9.03ns ± 3%          7.85ns ± 2%  -12.98%  (p=0.000 n=37+37)

name                                          old INSTRUCTIONS/op  new INSTRUCTIONS/op  delta
BM_EraseIf/num_elements:10/num_erased:0         19.5 ± 1%            14.9 ± 0%  -23.79%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:1000/num_erased:0       19.9 ± 0%            12.7 ± 0%  -36.20%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:10/num_erased:5         36.9 ± 1%            30.9 ± 0%  -16.47%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:1000/num_erased:500     44.8 ± 0%            37.6 ± 0%  -16.06%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:10/num_erased:10        53.0 ± 1%            46.9 ± 0%  -11.61%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:1000/num_erased:1000    69.8 ± 0%            62.6 ± 0%  -10.32%  (p=0.000 n=36+37)

name                                          old CYCLES/op        new CYCLES/op        delta
BM_EraseIf/num_elements:10/num_erased:0         6.10 ± 7%            4.91 ± 1%  -19.49%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:1000/num_erased:0       19.4 ± 1%             7.7 ± 2%  -60.04%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:10/num_erased:5         13.9 ± 4%             9.2 ± 3%  -33.80%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:1000/num_erased:500     35.5 ± 0%            29.5 ± 1%  -16.74%  (p=0.000 n=37+37)
BM_EraseIf/num_elements:10/num_erased:10        20.8 ± 5%            13.5 ± 0%  -35.07%  (p=0.000 n=37+30)
BM_EraseIf/num_elements:1000/num_erased:1000    28.9 ± 0%            25.1 ± 0%  -13.06%  (p=0.000 n=37+37)
```

PiperOrigin-RevId: 642016364
Change-Id: I8be6af5916bd45fd110bb0398c3ffe932a6a083f
parent d30298a1
...@@ -709,8 +709,10 @@ cc_test( ...@@ -709,8 +709,10 @@ cc_test(
":hash_policy_testing", ":hash_policy_testing",
":hashtable_debug", ":hashtable_debug",
":hashtablez_sampler", ":hashtablez_sampler",
":node_hash_set",
":raw_hash_set", ":raw_hash_set",
":test_allocator", ":test_allocator",
":test_instance_tracker",
"//absl/base", "//absl/base",
"//absl/base:config", "//absl/base:config",
"//absl/base:core_headers", "//absl/base:core_headers",
......
...@@ -783,10 +783,12 @@ absl_cc_test( ...@@ -783,10 +783,12 @@ absl_cc_test(
absl::hashtablez_sampler absl::hashtablez_sampler
absl::log absl::log
absl::memory absl::memory
absl::node_hash_set
absl::prefetch absl::prefetch
absl::raw_hash_set absl::raw_hash_set
absl::strings absl::strings
absl::test_allocator absl::test_allocator
absl::test_instance_tracker
absl::type_traits absl::type_traits
GTest::gmock_main GTest::gmock_main
) )
......
...@@ -1222,6 +1222,8 @@ class RawHashSetLayout { ...@@ -1222,6 +1222,8 @@ class RawHashSetLayout {
size_t slot_offset_; size_t slot_offset_;
}; };
struct HashtableFreeFunctionsAccess;
// We only allow a maximum of 1 SOO element, which makes the implementation // We only allow a maximum of 1 SOO element, which makes the implementation
// much simpler. Complications with multiple SOO elements include: // much simpler. Complications with multiple SOO elements include:
// - Satisfying the guarantee that erasing one element doesn't invalidate // - Satisfying the guarantee that erasing one element doesn't invalidate
...@@ -1858,8 +1860,9 @@ inline void* SlotAddress(void* slot_array, size_t slot, size_t slot_size) { ...@@ -1858,8 +1860,9 @@ inline void* SlotAddress(void* slot_array, size_t slot, size_t slot_size) {
} }
// Iterates over all full slots and calls `cb(const ctrl_t*, SlotType*)`. // Iterates over all full slots and calls `cb(const ctrl_t*, SlotType*)`.
// NOTE: no erasure from this table allowed during Callback call. // If kAllowRemoveReentrance is false, no erasure from this table allowed during
template <class SlotType, class Callback> // Callback call. This mode is slightly faster.
template <bool kAllowRemoveReentrance, class SlotType, class Callback>
ABSL_ATTRIBUTE_ALWAYS_INLINE inline void IterateOverFullSlots( ABSL_ATTRIBUTE_ALWAYS_INLINE inline void IterateOverFullSlots(
const CommonFields& c, SlotType* slot, Callback cb) { const CommonFields& c, SlotType* slot, Callback cb) {
const size_t cap = c.capacity(); const size_t cap = c.capacity();
...@@ -1882,18 +1885,25 @@ ABSL_ATTRIBUTE_ALWAYS_INLINE inline void IterateOverFullSlots( ...@@ -1882,18 +1885,25 @@ ABSL_ATTRIBUTE_ALWAYS_INLINE inline void IterateOverFullSlots(
--ctrl; --ctrl;
--slot; --slot;
for (uint32_t i : mask) { for (uint32_t i : mask) {
if (!kAllowRemoveReentrance || ABSL_PREDICT_TRUE(IsFull(ctrl[i]))) {
cb(ctrl + i, slot + i); cb(ctrl + i, slot + i);
} }
}
return; return;
} }
size_t remaining = c.size(); size_t remaining = c.size();
while (remaining != 0) { while (remaining != 0) {
for (uint32_t i : GroupFullEmptyOrDeleted(ctrl).MaskFull()) { for (uint32_t i : GroupFullEmptyOrDeleted(ctrl).MaskFull()) {
if (!kAllowRemoveReentrance || ABSL_PREDICT_TRUE(IsFull(ctrl[i]))) {
cb(ctrl + i, slot + i); cb(ctrl + i, slot + i);
}
--remaining; --remaining;
} }
slot += Group::kWidth;
ctrl += Group::kWidth; ctrl += Group::kWidth;
if (kAllowRemoveReentrance && *(ctrl - 1) == ctrl_t::kSentinel) {
break;
}
slot += Group::kWidth;
} }
} }
...@@ -2430,6 +2440,7 @@ class raw_hash_set { ...@@ -2430,6 +2440,7 @@ class raw_hash_set {
class iterator : private HashSetIteratorGenerationInfo { class iterator : private HashSetIteratorGenerationInfo {
friend class raw_hash_set; friend class raw_hash_set;
friend struct HashtableFreeFunctionsAccess;
public: public:
using iterator_category = std::forward_iterator_tag; using iterator_category = std::forward_iterator_tag;
...@@ -2736,7 +2747,7 @@ class raw_hash_set { ...@@ -2736,7 +2747,7 @@ class raw_hash_set {
size_t offset = cap; size_t offset = cap;
const size_t shift = const size_t shift =
is_single_group(cap) ? (PerTableSalt(control()) | 1) : 0; is_single_group(cap) ? (PerTableSalt(control()) | 1) : 0;
IterateOverFullSlots( IterateOverFullSlots</*kAllowRemoveReentrance=*/false>(
that.common(), that.slot_array(), that.common(), that.slot_array(),
[&](const ctrl_t* that_ctrl, [&](const ctrl_t* that_ctrl,
slot_type* that_slot) ABSL_ATTRIBUTE_ALWAYS_INLINE { slot_type* that_slot) ABSL_ATTRIBUTE_ALWAYS_INLINE {
...@@ -3411,6 +3422,8 @@ class raw_hash_set { ...@@ -3411,6 +3422,8 @@ class raw_hash_set {
friend struct absl::container_internal::hashtable_debug_internal:: friend struct absl::container_internal::hashtable_debug_internal::
HashtableDebugAccess; HashtableDebugAccess;
friend struct absl::container_internal::HashtableFreeFunctionsAccess;
struct FindElement { struct FindElement {
template <class K, class... Args> template <class K, class... Args>
const_iterator operator()(const K& key, Args&&...) const { const_iterator operator()(const K& key, Args&&...) const {
...@@ -3521,7 +3534,7 @@ class raw_hash_set { ...@@ -3521,7 +3534,7 @@ class raw_hash_set {
inline void destroy_slots() { inline void destroy_slots() {
assert(!is_soo()); assert(!is_soo());
if (PolicyTraits::template destroy_is_trivial<Alloc>()) return; if (PolicyTraits::template destroy_is_trivial<Alloc>()) return;
IterateOverFullSlots( IterateOverFullSlots</*kAllowRemoveReentrance=*/false>(
common(), slot_array(), common(), slot_array(),
[&](const ctrl_t*, slot_type* slot) [&](const ctrl_t*, slot_type* slot)
ABSL_ATTRIBUTE_ALWAYS_INLINE { this->destroy(slot); }); ABSL_ATTRIBUTE_ALWAYS_INLINE { this->destroy(slot); });
...@@ -3866,7 +3879,8 @@ class raw_hash_set { ...@@ -3866,7 +3879,8 @@ class raw_hash_set {
} }
// We only do validation for small tables so that it's constant time. // We only do validation for small tables so that it's constant time.
if (capacity() > 16) return; if (capacity() > 16) return;
IterateOverFullSlots(common(), slot_array(), assert_consistent); IterateOverFullSlots</*kAllowRemoveReentrance=*/false>(
common(), slot_array(), assert_consistent);
#endif #endif
} }
...@@ -4030,19 +4044,41 @@ class raw_hash_set { ...@@ -4030,19 +4044,41 @@ class raw_hash_set {
key_equal{}, allocator_type{}}; key_equal{}, allocator_type{}};
}; };
// Friend access for free functions in raw_hash_set.h.
struct HashtableFreeFunctionsAccess {
template <class Predicate, typename Set>
static typename Set::size_type EraseIf(Predicate& pred, Set* c) {
if (c->empty()) {
return 0;
}
if (c->is_soo()) {
auto it = c->soo_iterator();
if (!pred(*it)) {
return 0;
}
c->destroy(it.slot());
c->common().set_empty_soo();
return 1;
}
size_t num_deleted = 0;
IterateOverFullSlots</*kAllowRemoveReentrance=*/true>(
c->common(), c->slot_array(), [&](const ctrl_t* ctrl, auto* slot) {
if (pred(Set::PolicyTraits::element(slot))) {
c->destroy(slot);
EraseMetaOnly(c->common(), static_cast<size_t>(ctrl - c->control()),
sizeof(*slot));
++num_deleted;
}
});
return num_deleted;
}
};
// Erases all elements that satisfy the predicate `pred` from the container `c`. // Erases all elements that satisfy the predicate `pred` from the container `c`.
template <typename P, typename H, typename E, typename A, typename Predicate> template <typename P, typename H, typename E, typename A, typename Predicate>
typename raw_hash_set<P, H, E, A>::size_type EraseIf( typename raw_hash_set<P, H, E, A>::size_type EraseIf(
Predicate& pred, raw_hash_set<P, H, E, A>* c) { Predicate& pred, raw_hash_set<P, H, E, A>* c) {
const auto initial_size = c->size(); return HashtableFreeFunctionsAccess::EraseIf(pred, c);
for (auto it = c->begin(), last = c->end(); it != last;) {
if (pred(*it)) {
c->erase(it++);
} else {
++it;
}
}
return initial_size - c->size();
} }
namespace hashtable_debug_internal { namespace hashtable_debug_internal {
......
...@@ -52,12 +52,15 @@ ...@@ -52,12 +52,15 @@
#include "absl/container/internal/hashtable_debug.h" #include "absl/container/internal/hashtable_debug.h"
#include "absl/container/internal/hashtablez_sampler.h" #include "absl/container/internal/hashtablez_sampler.h"
#include "absl/container/internal/test_allocator.h" #include "absl/container/internal/test_allocator.h"
#include "absl/container/internal/test_instance_tracker.h"
#include "absl/container/node_hash_set.h"
#include "absl/functional/function_ref.h" #include "absl/functional/function_ref.h"
#include "absl/hash/hash.h" #include "absl/hash/hash.h"
#include "absl/log/check.h" #include "absl/log/check.h"
#include "absl/log/log.h" #include "absl/log/log.h"
#include "absl/memory/memory.h" #include "absl/memory/memory.h"
#include "absl/meta/type_traits.h" #include "absl/meta/type_traits.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/string_view.h" #include "absl/strings/string_view.h"
namespace absl { namespace absl {
...@@ -797,21 +800,22 @@ TEST(Table, EmptyFunctorOptimization) { ...@@ -797,21 +800,22 @@ TEST(Table, EmptyFunctorOptimization) {
template <class TableType> template <class TableType>
class SooTest : public testing::Test {}; class SooTest : public testing::Test {};
TYPED_TEST_SUITE_P(SooTest); using SooTableTypes = ::testing::Types<SooIntTable, NonSooIntTable>;
TYPED_TEST_SUITE(SooTest, SooTableTypes);
TYPED_TEST_P(SooTest, Empty) { TYPED_TEST(SooTest, Empty) {
TypeParam t; TypeParam t;
EXPECT_EQ(0, t.size()); EXPECT_EQ(0, t.size());
EXPECT_TRUE(t.empty()); EXPECT_TRUE(t.empty());
} }
TYPED_TEST_P(SooTest, LookupEmpty) { TYPED_TEST(SooTest, LookupEmpty) {
TypeParam t; TypeParam t;
auto it = t.find(0); auto it = t.find(0);
EXPECT_TRUE(it == t.end()); EXPECT_TRUE(it == t.end());
} }
TYPED_TEST_P(SooTest, Insert1) { TYPED_TEST(SooTest, Insert1) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
auto res = t.emplace(0); auto res = t.emplace(0);
...@@ -821,7 +825,7 @@ TYPED_TEST_P(SooTest, Insert1) { ...@@ -821,7 +825,7 @@ TYPED_TEST_P(SooTest, Insert1) {
EXPECT_THAT(*t.find(0), 0); EXPECT_THAT(*t.find(0), 0);
} }
TYPED_TEST_P(SooTest, Insert2) { TYPED_TEST(SooTest, Insert2) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
auto res = t.emplace(0); auto res = t.emplace(0);
...@@ -884,7 +888,7 @@ TEST(Table, InsertCollisionAndFindAfterDelete) { ...@@ -884,7 +888,7 @@ TEST(Table, InsertCollisionAndFindAfterDelete) {
EXPECT_TRUE(t.empty()); EXPECT_TRUE(t.empty());
} }
TYPED_TEST_P(SooTest, EraseInSmallTables) { TYPED_TEST(SooTest, EraseInSmallTables) {
for (int64_t size = 0; size < 64; ++size) { for (int64_t size = 0; size < 64; ++size) {
TypeParam t; TypeParam t;
for (int64_t i = 0; i < size; ++i) { for (int64_t i = 0; i < size; ++i) {
...@@ -901,7 +905,7 @@ TYPED_TEST_P(SooTest, EraseInSmallTables) { ...@@ -901,7 +905,7 @@ TYPED_TEST_P(SooTest, EraseInSmallTables) {
} }
} }
TYPED_TEST_P(SooTest, InsertWithinCapacity) { TYPED_TEST(SooTest, InsertWithinCapacity) {
TypeParam t; TypeParam t;
t.reserve(10); t.reserve(10);
const size_t original_capacity = t.capacity(); const size_t original_capacity = t.capacity();
...@@ -935,9 +939,11 @@ TYPED_TEST_P(SooTest, InsertWithinCapacity) { ...@@ -935,9 +939,11 @@ TYPED_TEST_P(SooTest, InsertWithinCapacity) {
template <class TableType> template <class TableType>
class SmallTableResizeTest : public testing::Test {}; class SmallTableResizeTest : public testing::Test {};
TYPED_TEST_SUITE_P(SmallTableResizeTest); using SmallTableTypes =
::testing::Types<IntTable, TransferableIntTable, SooIntTable>;
TYPED_TEST_SUITE(SmallTableResizeTest, SmallTableTypes);
TYPED_TEST_P(SmallTableResizeTest, InsertIntoSmallTable) { TYPED_TEST(SmallTableResizeTest, InsertIntoSmallTable) {
TypeParam t; TypeParam t;
for (int i = 0; i < 32; ++i) { for (int i = 0; i < 32; ++i) {
t.insert(i); t.insert(i);
...@@ -949,7 +955,7 @@ TYPED_TEST_P(SmallTableResizeTest, InsertIntoSmallTable) { ...@@ -949,7 +955,7 @@ TYPED_TEST_P(SmallTableResizeTest, InsertIntoSmallTable) {
} }
} }
TYPED_TEST_P(SmallTableResizeTest, ResizeGrowSmallTables) { TYPED_TEST(SmallTableResizeTest, ResizeGrowSmallTables) {
for (size_t source_size = 0; source_size < 32; ++source_size) { for (size_t source_size = 0; source_size < 32; ++source_size) {
for (size_t target_size = source_size; target_size < 32; ++target_size) { for (size_t target_size = source_size; target_size < 32; ++target_size) {
for (bool rehash : {false, true}) { for (bool rehash : {false, true}) {
...@@ -971,7 +977,7 @@ TYPED_TEST_P(SmallTableResizeTest, ResizeGrowSmallTables) { ...@@ -971,7 +977,7 @@ TYPED_TEST_P(SmallTableResizeTest, ResizeGrowSmallTables) {
} }
} }
TYPED_TEST_P(SmallTableResizeTest, ResizeReduceSmallTables) { TYPED_TEST(SmallTableResizeTest, ResizeReduceSmallTables) {
for (size_t source_size = 0; source_size < 32; ++source_size) { for (size_t source_size = 0; source_size < 32; ++source_size) {
for (size_t target_size = 0; target_size <= source_size; ++target_size) { for (size_t target_size = 0; target_size <= source_size; ++target_size) {
TypeParam t; TypeParam t;
...@@ -994,13 +1000,6 @@ TYPED_TEST_P(SmallTableResizeTest, ResizeReduceSmallTables) { ...@@ -994,13 +1000,6 @@ TYPED_TEST_P(SmallTableResizeTest, ResizeReduceSmallTables) {
} }
} }
REGISTER_TYPED_TEST_SUITE_P(SmallTableResizeTest, InsertIntoSmallTable,
ResizeGrowSmallTables, ResizeReduceSmallTables);
using SmallTableTypes =
::testing::Types<IntTable, TransferableIntTable, SooIntTable>;
INSTANTIATE_TYPED_TEST_SUITE_P(InstanceSmallTableResizeTest,
SmallTableResizeTest, SmallTableTypes);
TEST(Table, LazyEmplace) { TEST(Table, LazyEmplace) {
StringTable t; StringTable t;
bool called = false; bool called = false;
...@@ -1019,13 +1018,13 @@ TEST(Table, LazyEmplace) { ...@@ -1019,13 +1018,13 @@ TEST(Table, LazyEmplace) {
EXPECT_THAT(*it, Pair("abc", "ABC")); EXPECT_THAT(*it, Pair("abc", "ABC"));
} }
TYPED_TEST_P(SooTest, ContainsEmpty) { TYPED_TEST(SooTest, ContainsEmpty) {
TypeParam t; TypeParam t;
EXPECT_FALSE(t.contains(0)); EXPECT_FALSE(t.contains(0));
} }
TYPED_TEST_P(SooTest, Contains1) { TYPED_TEST(SooTest, Contains1) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.insert(0).second); EXPECT_TRUE(t.insert(0).second);
...@@ -1036,7 +1035,7 @@ TYPED_TEST_P(SooTest, Contains1) { ...@@ -1036,7 +1035,7 @@ TYPED_TEST_P(SooTest, Contains1) {
EXPECT_FALSE(t.contains(0)); EXPECT_FALSE(t.contains(0));
} }
TYPED_TEST_P(SooTest, Contains2) { TYPED_TEST(SooTest, Contains2) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.insert(0).second); EXPECT_TRUE(t.insert(0).second);
...@@ -1334,7 +1333,7 @@ TEST(Table, RehashWithNoResize) { ...@@ -1334,7 +1333,7 @@ TEST(Table, RehashWithNoResize) {
} }
} }
TYPED_TEST_P(SooTest, InsertEraseStressTest) { TYPED_TEST(SooTest, InsertEraseStressTest) {
TypeParam t; TypeParam t;
const size_t kMinElementCount = 250; const size_t kMinElementCount = 250;
std::deque<int> keys; std::deque<int> keys;
...@@ -1363,7 +1362,7 @@ TEST(Table, InsertOverloads) { ...@@ -1363,7 +1362,7 @@ TEST(Table, InsertOverloads) {
Pair("DEF", "!!!"))); Pair("DEF", "!!!")));
} }
TYPED_TEST_P(SooTest, LargeTable) { TYPED_TEST(SooTest, LargeTable) {
TypeParam t; TypeParam t;
for (int64_t i = 0; i != 100000; ++i) t.emplace(i << 40); for (int64_t i = 0; i != 100000; ++i) t.emplace(i << 40);
for (int64_t i = 0; i != 100000; ++i) for (int64_t i = 0; i != 100000; ++i)
...@@ -1371,7 +1370,7 @@ TYPED_TEST_P(SooTest, LargeTable) { ...@@ -1371,7 +1370,7 @@ TYPED_TEST_P(SooTest, LargeTable) {
} }
// Timeout if copy is quadratic as it was in Rust. // Timeout if copy is quadratic as it was in Rust.
TYPED_TEST_P(SooTest, EnsureNonQuadraticAsInRust) { TYPED_TEST(SooTest, EnsureNonQuadraticAsInRust) {
static const size_t kLargeSize = 1 << 15; static const size_t kLargeSize = 1 << 15;
TypeParam t; TypeParam t;
...@@ -1384,7 +1383,7 @@ TYPED_TEST_P(SooTest, EnsureNonQuadraticAsInRust) { ...@@ -1384,7 +1383,7 @@ TYPED_TEST_P(SooTest, EnsureNonQuadraticAsInRust) {
for (const auto& entry : t) t2.insert(entry); for (const auto& entry : t) t2.insert(entry);
} }
TYPED_TEST_P(SooTest, ClearBug) { TYPED_TEST(SooTest, ClearBug) {
if (SwisstableGenerationsEnabled()) { if (SwisstableGenerationsEnabled()) {
GTEST_SKIP() << "Generations being enabled causes extra rehashes."; GTEST_SKIP() << "Generations being enabled causes extra rehashes.";
} }
...@@ -1411,7 +1410,7 @@ TYPED_TEST_P(SooTest, ClearBug) { ...@@ -1411,7 +1410,7 @@ TYPED_TEST_P(SooTest, ClearBug) {
capacity * sizeof(typename TypeParam::value_type)); capacity * sizeof(typename TypeParam::value_type));
} }
TYPED_TEST_P(SooTest, Erase) { TYPED_TEST(SooTest, Erase) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
auto res = t.emplace(0); auto res = t.emplace(0);
...@@ -1422,7 +1421,7 @@ TYPED_TEST_P(SooTest, Erase) { ...@@ -1422,7 +1421,7 @@ TYPED_TEST_P(SooTest, Erase) {
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
} }
TYPED_TEST_P(SooTest, EraseMaintainsValidIterator) { TYPED_TEST(SooTest, EraseMaintainsValidIterator) {
TypeParam t; TypeParam t;
const int kNumElements = 100; const int kNumElements = 100;
for (int i = 0; i < kNumElements; i++) { for (int i = 0; i < kNumElements; i++) {
...@@ -1441,7 +1440,7 @@ TYPED_TEST_P(SooTest, EraseMaintainsValidIterator) { ...@@ -1441,7 +1440,7 @@ TYPED_TEST_P(SooTest, EraseMaintainsValidIterator) {
EXPECT_EQ(num_erase_calls, kNumElements); EXPECT_EQ(num_erase_calls, kNumElements);
} }
TYPED_TEST_P(SooTest, EraseBeginEnd) { TYPED_TEST(SooTest, EraseBeginEnd) {
TypeParam t; TypeParam t;
for (int i = 0; i < 10; ++i) t.insert(i); for (int i = 0; i < 10; ++i) t.insert(i);
EXPECT_EQ(t.size(), 10); EXPECT_EQ(t.size(), 10);
...@@ -1862,7 +1861,7 @@ TEST(Table, GrowthInfoDeletedBit) { ...@@ -1862,7 +1861,7 @@ TEST(Table, GrowthInfoDeletedBit) {
RawHashSetTestOnlyAccess::GetCommon(t).growth_info().HasNoDeleted()); RawHashSetTestOnlyAccess::GetCommon(t).growth_info().HasNoDeleted());
} }
TYPED_TEST_P(SooTest, Clear) { TYPED_TEST(SooTest, Clear) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
t.clear(); t.clear();
...@@ -1875,7 +1874,7 @@ TYPED_TEST_P(SooTest, Clear) { ...@@ -1875,7 +1874,7 @@ TYPED_TEST_P(SooTest, Clear) {
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
} }
TYPED_TEST_P(SooTest, Swap) { TYPED_TEST(SooTest, Swap) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
auto res = t.emplace(0); auto res = t.emplace(0);
...@@ -1889,7 +1888,7 @@ TYPED_TEST_P(SooTest, Swap) { ...@@ -1889,7 +1888,7 @@ TYPED_TEST_P(SooTest, Swap) {
EXPECT_THAT(*u.find(0), 0); EXPECT_THAT(*u.find(0), 0);
} }
TYPED_TEST_P(SooTest, Rehash) { TYPED_TEST(SooTest, Rehash) {
TypeParam t; TypeParam t;
EXPECT_TRUE(t.find(0) == t.end()); EXPECT_TRUE(t.find(0) == t.end());
t.emplace(0); t.emplace(0);
...@@ -1901,7 +1900,7 @@ TYPED_TEST_P(SooTest, Rehash) { ...@@ -1901,7 +1900,7 @@ TYPED_TEST_P(SooTest, Rehash) {
EXPECT_THAT(*t.find(1), 1); EXPECT_THAT(*t.find(1), 1);
} }
TYPED_TEST_P(SooTest, RehashDoesNotRehashWhenNotNecessary) { TYPED_TEST(SooTest, RehashDoesNotRehashWhenNotNecessary) {
TypeParam t; TypeParam t;
t.emplace(0); t.emplace(0);
t.emplace(1); t.emplace(1);
...@@ -1926,7 +1925,7 @@ TEST(Table, RehashZeroDeallocatesEmptyTable) { ...@@ -1926,7 +1925,7 @@ TEST(Table, RehashZeroDeallocatesEmptyTable) {
EXPECT_EQ(0, t.bucket_count()); EXPECT_EQ(0, t.bucket_count());
} }
TYPED_TEST_P(SooTest, RehashZeroForcesRehash) { TYPED_TEST(SooTest, RehashZeroForcesRehash) {
TypeParam t; TypeParam t;
t.emplace(0); t.emplace(0);
t.emplace(1); t.emplace(1);
...@@ -1943,7 +1942,7 @@ TEST(Table, ConstructFromInitList) { ...@@ -1943,7 +1942,7 @@ TEST(Table, ConstructFromInitList) {
StringTable t = {P(), Q(), {}, {{}, {}}}; StringTable t = {P(), Q(), {}, {{}, {}}};
} }
TYPED_TEST_P(SooTest, CopyConstruct) { TYPED_TEST(SooTest, CopyConstruct) {
TypeParam t; TypeParam t;
t.emplace(0); t.emplace(0);
EXPECT_EQ(1, t.size()); EXPECT_EQ(1, t.size());
...@@ -1964,7 +1963,7 @@ TYPED_TEST_P(SooTest, CopyConstruct) { ...@@ -1964,7 +1963,7 @@ TYPED_TEST_P(SooTest, CopyConstruct) {
} }
} }
TYPED_TEST_P(SooTest, CopyDifferentSizes) { TYPED_TEST(SooTest, CopyDifferentSizes) {
TypeParam t; TypeParam t;
for (int i = 0; i < 100; ++i) { for (int i = 0; i < 100; ++i) {
...@@ -1978,7 +1977,7 @@ TYPED_TEST_P(SooTest, CopyDifferentSizes) { ...@@ -1978,7 +1977,7 @@ TYPED_TEST_P(SooTest, CopyDifferentSizes) {
} }
} }
TYPED_TEST_P(SooTest, CopyDifferentCapacities) { TYPED_TEST(SooTest, CopyDifferentCapacities) {
for (int cap = 1; cap < 100; cap = cap * 2 + 1) { for (int cap = 1; cap < 100; cap = cap * 2 + 1) {
TypeParam t; TypeParam t;
t.reserve(static_cast<size_t>(cap)); t.reserve(static_cast<size_t>(cap));
...@@ -2127,7 +2126,7 @@ TEST(Table, Equality3) { ...@@ -2127,7 +2126,7 @@ TEST(Table, Equality3) {
EXPECT_NE(u, t); EXPECT_NE(u, t);
} }
TYPED_TEST_P(SooTest, NumDeletedRegression) { TYPED_TEST(SooTest, NumDeletedRegression) {
TypeParam t; TypeParam t;
t.emplace(0); t.emplace(0);
t.erase(t.find(0)); t.erase(t.find(0));
...@@ -2136,7 +2135,7 @@ TYPED_TEST_P(SooTest, NumDeletedRegression) { ...@@ -2136,7 +2135,7 @@ TYPED_TEST_P(SooTest, NumDeletedRegression) {
t.clear(); t.clear();
} }
TYPED_TEST_P(SooTest, FindFullDeletedRegression) { TYPED_TEST(SooTest, FindFullDeletedRegression) {
TypeParam t; TypeParam t;
for (int i = 0; i < 1000; ++i) { for (int i = 0; i < 1000; ++i) {
t.emplace(i); t.emplace(i);
...@@ -2145,7 +2144,7 @@ TYPED_TEST_P(SooTest, FindFullDeletedRegression) { ...@@ -2145,7 +2144,7 @@ TYPED_TEST_P(SooTest, FindFullDeletedRegression) {
EXPECT_EQ(0, t.size()); EXPECT_EQ(0, t.size());
} }
TYPED_TEST_P(SooTest, ReplacingDeletedSlotDoesNotRehash) { TYPED_TEST(SooTest, ReplacingDeletedSlotDoesNotRehash) {
// We need to disable hashtablez to avoid issues related to SOO and sampling. // We need to disable hashtablez to avoid issues related to SOO and sampling.
SetHashtablezEnabled(false); SetHashtablezEnabled(false);
...@@ -2409,7 +2408,7 @@ TEST(Nodes, ExtractInsert) { ...@@ -2409,7 +2408,7 @@ TEST(Nodes, ExtractInsert) {
EXPECT_FALSE(node); // NOLINT(bugprone-use-after-move) EXPECT_FALSE(node); // NOLINT(bugprone-use-after-move)
} }
TYPED_TEST_P(SooTest, HintInsert) { TYPED_TEST(SooTest, HintInsert) {
TypeParam t = {1, 2, 3}; TypeParam t = {1, 2, 3};
auto node = t.extract(1); auto node = t.extract(1);
EXPECT_THAT(t, UnorderedElementsAre(2, 3)); EXPECT_THAT(t, UnorderedElementsAre(2, 3));
...@@ -2449,7 +2448,7 @@ std::vector<int> OrderOfIteration(const T& t) { ...@@ -2449,7 +2448,7 @@ std::vector<int> OrderOfIteration(const T& t) {
// we are touching different memory pages to cause the ordering to change. // we are touching different memory pages to cause the ordering to change.
// We also need to keep the old tables around to avoid getting the same memory // We also need to keep the old tables around to avoid getting the same memory
// blocks over and over. // blocks over and over.
TYPED_TEST_P(SooTest, IterationOrderChangesByInstance) { TYPED_TEST(SooTest, IterationOrderChangesByInstance) {
for (size_t size : {2, 6, 12, 20}) { for (size_t size : {2, 6, 12, 20}) {
const auto reference_table = MakeSimpleTable<TypeParam>(size); const auto reference_table = MakeSimpleTable<TypeParam>(size);
const auto reference = OrderOfIteration(reference_table); const auto reference = OrderOfIteration(reference_table);
...@@ -2468,7 +2467,7 @@ TYPED_TEST_P(SooTest, IterationOrderChangesByInstance) { ...@@ -2468,7 +2467,7 @@ TYPED_TEST_P(SooTest, IterationOrderChangesByInstance) {
} }
} }
TYPED_TEST_P(SooTest, IterationOrderChangesOnRehash) { TYPED_TEST(SooTest, IterationOrderChangesOnRehash) {
// We test different sizes with many small numbers, because small table // We test different sizes with many small numbers, because small table
// resize has a different codepath. // resize has a different codepath.
// Note: iteration order for size() <= 1 is always the same. // Note: iteration order for size() <= 1 is always the same.
...@@ -2501,7 +2500,7 @@ TYPED_TEST_P(SooTest, IterationOrderChangesOnRehash) { ...@@ -2501,7 +2500,7 @@ TYPED_TEST_P(SooTest, IterationOrderChangesOnRehash) {
// Verify that pointers are invalidated as soon as a second element is inserted. // Verify that pointers are invalidated as soon as a second element is inserted.
// This prevents dependency on pointer stability on small tables. // This prevents dependency on pointer stability on small tables.
TYPED_TEST_P(SooTest, UnstablePointers) { TYPED_TEST(SooTest, UnstablePointers) {
// We need to disable hashtablez to avoid issues related to SOO and sampling. // We need to disable hashtablez to avoid issues related to SOO and sampling.
SetHashtablezEnabled(false); SetHashtablezEnabled(false);
...@@ -2571,7 +2570,7 @@ constexpr bool kMsvc = true; ...@@ -2571,7 +2570,7 @@ constexpr bool kMsvc = true;
constexpr bool kMsvc = false; constexpr bool kMsvc = false;
#endif #endif
TYPED_TEST_P(SooTest, IteratorInvalidAssertsEqualityOperator) { TYPED_TEST(SooTest, IteratorInvalidAssertsEqualityOperator) {
if (!IsAssertEnabled() && !SwisstableGenerationsEnabled()) if (!IsAssertEnabled() && !SwisstableGenerationsEnabled())
GTEST_SKIP() << "Assertions not enabled."; GTEST_SKIP() << "Assertions not enabled.";
...@@ -2608,7 +2607,7 @@ TYPED_TEST_P(SooTest, IteratorInvalidAssertsEqualityOperator) { ...@@ -2608,7 +2607,7 @@ TYPED_TEST_P(SooTest, IteratorInvalidAssertsEqualityOperator) {
EXPECT_DEATH_IF_SUPPORTED(void(iter2 == iter1), kContainerDiffDeathMessage); EXPECT_DEATH_IF_SUPPORTED(void(iter2 == iter1), kContainerDiffDeathMessage);
} }
TYPED_TEST_P(SooTest, IteratorInvalidAssertsEqualityOperatorRehash) { TYPED_TEST(SooTest, IteratorInvalidAssertsEqualityOperatorRehash) {
if (!IsAssertEnabled() && !SwisstableGenerationsEnabled()) if (!IsAssertEnabled() && !SwisstableGenerationsEnabled())
GTEST_SKIP() << "Assertions not enabled."; GTEST_SKIP() << "Assertions not enabled.";
if (kMsvc) GTEST_SKIP() << "MSVC doesn't support | in regex."; if (kMsvc) GTEST_SKIP() << "MSVC doesn't support | in regex.";
...@@ -2634,9 +2633,11 @@ TYPED_TEST_P(SooTest, IteratorInvalidAssertsEqualityOperatorRehash) { ...@@ -2634,9 +2633,11 @@ TYPED_TEST_P(SooTest, IteratorInvalidAssertsEqualityOperatorRehash) {
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE) #if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
template <typename T> template <typename T>
class RawHashSamplerTest : public testing::Test {}; class RawHashSamplerTest : public testing::Test {};
TYPED_TEST_SUITE_P(RawHashSamplerTest);
TYPED_TEST_P(RawHashSamplerTest, Sample) { using RawHashSamplerTestTypes = ::testing::Types<SooIntTable, NonSooIntTable>;
TYPED_TEST_SUITE(RawHashSamplerTest, RawHashSamplerTestTypes);
TYPED_TEST(RawHashSamplerTest, Sample) {
constexpr bool soo_enabled = std::is_same<SooIntTable, TypeParam>::value; constexpr bool soo_enabled = std::is_same<SooIntTable, TypeParam>::value;
// Enable the feature even if the prod default is off. // Enable the feature even if the prod default is off.
SetHashtablezEnabled(true); SetHashtablezEnabled(true);
...@@ -2708,10 +2709,6 @@ TYPED_TEST_P(RawHashSamplerTest, Sample) { ...@@ -2708,10 +2709,6 @@ TYPED_TEST_P(RawHashSamplerTest, Sample) {
} }
} }
REGISTER_TYPED_TEST_SUITE_P(RawHashSamplerTest, Sample);
using RawHashSamplerTestTypes = ::testing::Types<SooIntTable, NonSooIntTable>;
INSTANTIATE_TYPED_TEST_SUITE_P(My, RawHashSamplerTest, RawHashSamplerTestTypes);
std::vector<const HashtablezInfo*> SampleSooMutation( std::vector<const HashtablezInfo*> SampleSooMutation(
absl::FunctionRef<void(SooIntTable&)> mutate_table) { absl::FunctionRef<void(SooIntTable&)> mutate_table) {
// Enable the feature even if the prod default is off. // Enable the feature even if the prod default is off.
...@@ -2867,9 +2864,10 @@ TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) { ...@@ -2867,9 +2864,10 @@ TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) {
template <class TableType> template <class TableType>
class SanitizerTest : public testing::Test {}; class SanitizerTest : public testing::Test {};
TYPED_TEST_SUITE_P(SanitizerTest); using SanitizerTableTypes = ::testing::Types<IntTable, TransferableIntTable>;
TYPED_TEST_SUITE(SanitizerTest, SanitizerTableTypes);
TYPED_TEST_P(SanitizerTest, PoisoningUnused) { TYPED_TEST(SanitizerTest, PoisoningUnused) {
TypeParam t; TypeParam t;
for (size_t reserve_size = 2; reserve_size < 1024; for (size_t reserve_size = 2; reserve_size < 1024;
reserve_size = reserve_size * 3 / 2) { reserve_size = reserve_size * 3 / 2) {
...@@ -2887,11 +2885,6 @@ TYPED_TEST_P(SanitizerTest, PoisoningUnused) { ...@@ -2887,11 +2885,6 @@ TYPED_TEST_P(SanitizerTest, PoisoningUnused) {
} }
} }
REGISTER_TYPED_TEST_SUITE_P(SanitizerTest, PoisoningUnused);
using SanitizerTableTypes = ::testing::Types<IntTable, TransferableIntTable>;
INSTANTIATE_TYPED_TEST_SUITE_P(InstanceSanitizerTest, SanitizerTest,
SanitizerTableTypes);
// TODO(b/289225379): poison inline space when empty SOO. // TODO(b/289225379): poison inline space when empty SOO.
TEST(Sanitizer, PoisoningOnErase) { TEST(Sanitizer, PoisoningOnErase) {
NonSooIntTable t; NonSooIntTable t;
...@@ -3006,7 +2999,7 @@ TEST(Iterator, InvalidUseWithMoveCrashesWithSanitizers) { ...@@ -3006,7 +2999,7 @@ TEST(Iterator, InvalidUseWithMoveCrashesWithSanitizers) {
#endif #endif
} }
TYPED_TEST_P(SooTest, ReservedGrowthUpdatesWhenTableDoesntGrow) { TYPED_TEST(SooTest, ReservedGrowthUpdatesWhenTableDoesntGrow) {
TypeParam t; TypeParam t;
for (int i = 0; i < 8; ++i) t.insert(i); for (int i = 0; i < 8; ++i) t.insert(i);
// Want to insert twice without invalidating iterators so reserve. // Want to insert twice without invalidating iterators so reserve.
...@@ -3021,6 +3014,141 @@ TYPED_TEST_P(SooTest, ReservedGrowthUpdatesWhenTableDoesntGrow) { ...@@ -3021,6 +3014,141 @@ TYPED_TEST_P(SooTest, ReservedGrowthUpdatesWhenTableDoesntGrow) {
EXPECT_EQ(*it, 0); EXPECT_EQ(*it, 0);
} }
// Typed fixture used to check that `absl::erase_if` destroys exactly the
// erased elements: each test asserts `tracker.live_instances() == 0` once all
// tables are gone. Instantiated for both node- and flat-hash sets below.
template <class TableType>
class InstanceTrackerTest : public testing::Test {};

using ::absl::test_internal::CopyableMovableInstance;
using ::absl::test_internal::InstanceTracker;

// Hashes a CopyableMovableInstance by the integer value it wraps.
struct InstanceTrackerHash {
  size_t operator()(const CopyableMovableInstance& t) const {
    return absl::HashOf(t.value());
  }
};

// Cover both element storage strategies: node-based and in-slot (flat).
using InstanceTrackerTableTypes = ::testing::Types<
    absl::node_hash_set<CopyableMovableInstance, InstanceTrackerHash>,
    absl::flat_hash_set<CopyableMovableInstance, InstanceTrackerHash>>;
TYPED_TEST_SUITE(InstanceTrackerTest, InstanceTrackerTableTypes);
// erase_if with an always-true predicate empties tables of every size and
// leaves no live element instances behind.
TYPED_TEST(InstanceTrackerTest, EraseIfAll) {
  using Table = TypeParam;
  InstanceTracker tracker;
  for (int num_elements = 0; num_elements < 100; ++num_elements) {
    Table table;
    for (int value = 0; value < num_elements; ++value) table.emplace(value);
    absl::erase_if(table, [](const auto&) { return true; });
    ASSERT_EQ(table.size(), 0);
  }
  // Every element constructed above must have been destroyed.
  EXPECT_EQ(tracker.live_instances(), 0);
}
// erase_if with an always-false predicate must not remove (or destroy)
// anything, at any table size.
TYPED_TEST(InstanceTrackerTest, EraseIfNone) {
  using Table = TypeParam;
  InstanceTracker tracker;
  {
    Table table;
    for (size_t expected_size = 0; expected_size < 100; ++expected_size) {
      absl::erase_if(table, [](const auto&) { return false; });
      ASSERT_EQ(table.size(), expected_size);
      table.emplace(expected_size);
    }
  }
  // The table is out of scope, so nothing should remain alive.
  EXPECT_EQ(tracker.live_instances(), 0);
}
// Erasing the elements of one parity leaves exactly the other parity alive,
// for both parities and every table size.
TYPED_TEST(InstanceTrackerTest, EraseIfPartial) {
  using Table = TypeParam;
  InstanceTracker tracker;
  for (int parity : {0, 1}) {
    for (int num_elements = 0; num_elements < 100; ++num_elements) {
      SCOPED_TRACE(absl::StrCat(parity, " ", num_elements));
      Table table;
      std::vector<CopyableMovableInstance> survivors;
      for (int value = 0; value < num_elements; ++value) {
        table.emplace(value);
        if (value % 2 != parity) survivors.emplace_back(value);
      }
      absl::erase_if(table, [parity](const auto& e) {
        return e.value() % 2 == parity;
      });
      ASSERT_THAT(table, testing::UnorderedElementsAreArray(survivors));
    }
  }
  // All tables are destroyed per iteration; nothing may leak.
  EXPECT_EQ(tracker.live_instances(), 0);
}
// container_internal::EraseIf with an always-true predicate empties tables of
// every size (including the SOO-sized ones).
TYPED_TEST(SooTest, EraseIfAll) {
  auto erase_all = [](const auto&) { return true; };
  for (int num_elements = 0; num_elements < 100; ++num_elements) {
    TypeParam table;
    for (int value = 0; value < num_elements; ++value) table.insert(value);
    absl::container_internal::EraseIf(erase_all, &table);
    ASSERT_EQ(table.size(), 0);
  }
}
// container_internal::EraseIf with an always-false predicate leaves the table
// unchanged at every size along the way.
TYPED_TEST(SooTest, EraseIfNone) {
  auto keep_all = [](const auto&) { return false; };
  TypeParam table;
  for (size_t expected_size = 0; expected_size < 100; ++expected_size) {
    absl::container_internal::EraseIf(keep_all, &table);
    ASSERT_EQ(table.size(), expected_size);
    table.insert(expected_size);
  }
}
// EraseIf removes exactly the elements of one parity; the other parity
// survives, for both parities and every table size.
TYPED_TEST(SooTest, EraseIfPartial) {
  for (int parity : {0, 1}) {
    auto matches_parity = [parity](const auto& e) {
      return static_cast<int64_t>(e) % 2 == parity;
    };
    for (int num_elements = 0; num_elements < 100; ++num_elements) {
      SCOPED_TRACE(absl::StrCat(parity, " ", num_elements));
      TypeParam table;
      std::vector<int64_t> survivors;
      for (int value = 0; value < num_elements; ++value) {
        table.insert(value);
        if (value % 2 != parity) survivors.push_back(value);
      }
      absl::container_internal::EraseIf(matches_parity, &table);
      ASSERT_THAT(table, testing::UnorderedElementsAreArray(survivors));
    }
  }
}
// Test that we are allowed to erase during the callback in erase_if.
// TODO(b/345744331): Consider to change behavior to disallow erasure in the
// callback.
TYPED_TEST(SooTest, EraseIfReentry) {
  for (int size = 0; size < 100; ++size) {
    SCOPED_TRACE(absl::StrCat(size));
    TypeParam t;
    std::vector<int64_t> expected;
    for (int i = 0; i < size; ++i) {
      t.insert(i);
      // Per group of four {4k, 4k+1, 4k+2, 4k+3}: 4k is removed reentrantly by
      // the predicate and 4k+3 is removed by the predicate returning true, so
      // only 4k+1 and 4k+2 are expected to survive.
      if (i % 4 == 1 || i % 4 == 2) {
        expected.push_back(i);
      }
    }
    auto pred = [&](const auto& x) {
      auto value = static_cast<int64_t>(x);
      int64_t group = value / 4;
      // Reentrant erase: remove the first element of this element's group of
      // four while EraseIf is iterating the table. Erasing an already-erased
      // key is a no-op, so repeated calls for the same group are harmless.
      t.erase(group * 4);
      // Ask EraseIf itself to remove the last element of each group.
      if (value % 4 == 3) {
        return true;
      }
      return false;
    };
    absl::container_internal::EraseIf(pred, &t);
    ASSERT_THAT(t, testing::UnorderedElementsAreArray(expected));
  }
}
TEST(Table, EraseBeginEndResetsReservedGrowth) { TEST(Table, EraseBeginEndResetsReservedGrowth) {
bool frozen = false; bool frozen = false;
BadHashFreezableIntTable t{FreezableAlloc<int64_t>(&frozen)}; BadHashFreezableIntTable t{FreezableAlloc<int64_t>(&frozen)};
...@@ -3031,7 +3159,8 @@ TEST(Table, EraseBeginEndResetsReservedGrowth) { ...@@ -3031,7 +3159,8 @@ TEST(Table, EraseBeginEndResetsReservedGrowth) {
for (int i = 0; i < 10; ++i) { for (int i = 0; i < 10; ++i) {
// Create a long run (hash function returns constant). // Create a long run (hash function returns constant).
for (int j = 0; j < 100; ++j) t.insert(j); for (int j = 0; j < 100; ++j) t.insert(j);
// Erase elements from the middle of the long run, which creates tombstones. // Erase elements from the middle of the long run, which creates
// tombstones.
for (int j = 30; j < 60; ++j) t.erase(j); for (int j = 30; j < 60; ++j) t.erase(j);
EXPECT_EQ(t.size(), 70); EXPECT_EQ(t.size(), 70);
EXPECT_EQ(t.capacity(), cap); EXPECT_EQ(t.capacity(), cap);
...@@ -3181,12 +3310,12 @@ TEST(Table, IterateOverFullSlotsEmpty) { ...@@ -3181,12 +3310,12 @@ TEST(Table, IterateOverFullSlotsEmpty) {
auto fail_if_any = [](const ctrl_t*, auto* i) { auto fail_if_any = [](const ctrl_t*, auto* i) {
FAIL() << "expected no slots " << **i; FAIL() << "expected no slots " << **i;
}; };
container_internal::IterateOverFullSlots( container_internal::IterateOverFullSlots</*kAllowRemoveReentrance=*/false>(
RawHashSetTestOnlyAccess::GetCommon(t), RawHashSetTestOnlyAccess::GetCommon(t),
RawHashSetTestOnlyAccess::GetSlots(t), fail_if_any); RawHashSetTestOnlyAccess::GetSlots(t), fail_if_any);
for (size_t i = 0; i < 256; ++i) { for (size_t i = 0; i < 256; ++i) {
t.reserve(i); t.reserve(i);
container_internal::IterateOverFullSlots( container_internal::IterateOverFullSlots</*kAllowRemoveReentrance=*/false>(
RawHashSetTestOnlyAccess::GetCommon(t), RawHashSetTestOnlyAccess::GetCommon(t),
RawHashSetTestOnlyAccess::GetSlots(t), fail_if_any); RawHashSetTestOnlyAccess::GetSlots(t), fail_if_any);
} }
...@@ -3196,12 +3325,12 @@ TEST(Table, IterateOverFullSlotsFull) { ...@@ -3196,12 +3325,12 @@ TEST(Table, IterateOverFullSlotsFull) {
NonSooIntTable t; NonSooIntTable t;
std::vector<int64_t> expected_slots; std::vector<int64_t> expected_slots;
for (int64_t i = 0; i < 128; ++i) { for (int64_t idx = 0; idx < 128; ++idx) {
t.insert(i); t.insert(idx);
expected_slots.push_back(i); expected_slots.push_back(idx);
std::vector<int64_t> slots; std::vector<int64_t> slots;
container_internal::IterateOverFullSlots( container_internal::IterateOverFullSlots</*kAllowRemoveReentrance=*/false>(
RawHashSetTestOnlyAccess::GetCommon(t), RawHashSetTestOnlyAccess::GetCommon(t),
RawHashSetTestOnlyAccess::GetSlots(t), RawHashSetTestOnlyAccess::GetSlots(t),
[&t, &slots](const ctrl_t* ctrl, auto* i) { [&t, &slots](const ctrl_t* ctrl, auto* i) {
...@@ -3215,11 +3344,49 @@ TEST(Table, IterateOverFullSlotsFull) { ...@@ -3215,11 +3344,49 @@ TEST(Table, IterateOverFullSlotsFull) {
} }
} }
// Verifies IterateOverFullSlots with kAllowRemoveReentrance=true: the callback
// may erase other elements mid-iteration, and an element removed by the
// callback must not be visited afterwards.
TEST(Table, IterateOverFullSlotsAllowReentrance) {
  std::vector<int64_t> expected_values;
  // Tables contain 0..idx inclusive; expected_values accumulates the even
  // representative of each {2k, 2k+1} pair present so far.
  for (int64_t idx = 0; idx < 128; ++idx) {
    NonSooIntTable t;
    for (int val = 0; val <= idx; ++val) {
      t.insert(val);
    }
    // We are inserting only even values.
    // Only one element across 2*k and 2*k+1 should be visited.
    if (idx % 2 == 0) {
      expected_values.push_back(idx);
    }
    std::vector<int64_t> actual_values;
    container_internal::IterateOverFullSlots</*kAllowRemoveReentrance=*/true>(
        RawHashSetTestOnlyAccess::GetCommon(t),
        RawHashSetTestOnlyAccess::GetSlots(t),
        [&t, &actual_values](const ctrl_t* ctrl, auto* i) {
          int64_t value = **i;
          // Erase the other element from 2*k and 2*k+1 pair.
          t.erase(value ^ 1);
          // The ctrl pointer and the slot pointer passed to the callback must
          // refer to the same position in the table.
          ptrdiff_t ctrl_offset =
              ctrl - RawHashSetTestOnlyAccess::GetCommon(t).control();
          ptrdiff_t slot_offset = i - RawHashSetTestOnlyAccess::GetSlots(t);
          ASSERT_EQ(ctrl_offset, slot_offset);
          // Add an even value from the pair.
          // Only one element for each 2*k and 2*k+1 pair should be inserted.
          actual_values.push_back(value - value % 2);
        });
    EXPECT_THAT(actual_values,
                testing::UnorderedElementsAreArray(expected_values));
  }
}
template <typename T> template <typename T>
class SooTable : public testing::Test {}; class SooTable : public testing::Test {};
TYPED_TEST_SUITE_P(SooTable); using FreezableSooTableTypes =
::testing::Types<FreezableSizedValueSooTable<8>,
FreezableSizedValueSooTable<16>>;
TYPED_TEST_SUITE(SooTable, FreezableSooTableTypes);
TYPED_TEST_P(SooTable, Basic) { TYPED_TEST(SooTable, Basic) {
bool frozen = true; bool frozen = true;
TypeParam t{FreezableAlloc<typename TypeParam::value_type>(&frozen)}; TypeParam t{FreezableAlloc<typename TypeParam::value_type>(&frozen)};
if (t.capacity() != SooCapacity()) { if (t.capacity() != SooCapacity()) {
...@@ -3249,12 +3416,6 @@ TYPED_TEST_P(SooTable, Basic) { ...@@ -3249,12 +3416,6 @@ TYPED_TEST_P(SooTable, Basic) {
EXPECT_EQ(t.size(), 0); EXPECT_EQ(t.size(), 0);
} }
REGISTER_TYPED_TEST_SUITE_P(SooTable, Basic);
using FreezableSooTableTypes =
::testing::Types<FreezableSizedValueSooTable<8>,
FreezableSizedValueSooTable<16>>;
INSTANTIATE_TYPED_TEST_SUITE_P(My, SooTable, FreezableSooTableTypes);
TEST(Table, RehashToSooUnsampled) { TEST(Table, RehashToSooUnsampled) {
SooIntTable t; SooIntTable t;
if (t.capacity() != SooCapacity()) { if (t.capacity() != SooCapacity()) {
...@@ -3327,21 +3488,6 @@ TEST(Iterator, InconsistentHashEqFunctorsValidation) { ...@@ -3327,21 +3488,6 @@ TEST(Iterator, InconsistentHashEqFunctorsValidation) {
"hash/eq functors are inconsistent."); "hash/eq functors are inconsistent.");
} }
REGISTER_TYPED_TEST_SUITE_P(
SooTest, Empty, Clear, ClearBug, Contains1, Contains2, ContainsEmpty,
CopyConstruct, CopyDifferentCapacities, CopyDifferentSizes,
EnsureNonQuadraticAsInRust, Erase, EraseBeginEnd, EraseInSmallTables,
EraseMaintainsValidIterator, FindFullDeletedRegression, HintInsert, Insert1,
Insert2, InsertEraseStressTest, InsertWithinCapacity,
IterationOrderChangesByInstance, IterationOrderChangesOnRehash,
IteratorInvalidAssertsEqualityOperator,
IteratorInvalidAssertsEqualityOperatorRehash, LargeTable, LookupEmpty,
NumDeletedRegression, Rehash, RehashDoesNotRehashWhenNotNecessary,
RehashZeroForcesRehash, ReplacingDeletedSlotDoesNotRehash,
ReservedGrowthUpdatesWhenTableDoesntGrow, Swap, UnstablePointers);
using SooTableTypes = ::testing::Types<SooIntTable, NonSooIntTable>;
INSTANTIATE_TYPED_TEST_SUITE_P(My, SooTest, SooTableTypes);
} // namespace } // namespace
} // namespace container_internal } // namespace container_internal
ABSL_NAMESPACE_END ABSL_NAMESPACE_END
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment