Commit 3132b83a by Evan Brown Committed by Copybara-Service

Add pointer-stability validation in btree.

When ASan is enabled and we insert a new element, we replace the node that the new element lands on, so that code which incorrectly depends on pointer/reference stability triggers a use-after-free that ASan can detect.

PiperOrigin-RevId: 528826645
Change-Id: Ie5c15c13016a8aa831a0d1edc3ad33c1f5ca4d65
parent c0d58db0
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
#include <array> #include <array>
#include <cstdint> #include <cstdint>
#include <functional> #include <functional>
#include <iostream>
#include <iterator> #include <iterator>
#include <limits> #include <limits>
#include <map> #include <map>
...@@ -1480,9 +1481,17 @@ void ExpectOperationCounts(const int expected_moves, ...@@ -1480,9 +1481,17 @@ void ExpectOperationCounts(const int expected_moves,
tracker->ResetCopiesMovesSwaps(); tracker->ResetCopiesMovesSwaps();
} }
// True when this binary is built with AddressSanitizer. Under ASan the btree
// intentionally performs extra node replacements on insertion (to surface
// pointer/reference-stability bugs), so operation-counting tests skip
// themselves and the pointer-invalidation test only runs in this mode.
#ifdef ABSL_HAVE_ADDRESS_SANITIZER
constexpr bool kAsan = true;
#else
constexpr bool kAsan = false;
#endif
// Note: when the values in this test change, it is expected to have an impact // Note: when the values in this test change, it is expected to have an impact
// on performance. // on performance.
TEST(Btree, MovesComparisonsCopiesSwapsTracking) { TEST(Btree, MovesComparisonsCopiesSwapsTracking) {
if (kAsan) GTEST_SKIP() << "We do extra operations in ASan mode.";
InstanceTracker tracker; InstanceTracker tracker;
// Note: this is minimum number of values per node. // Note: this is minimum number of values per node.
SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/4> set4; SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/4> set4;
...@@ -1534,6 +1543,8 @@ struct MovableOnlyInstanceThreeWayCompare { ...@@ -1534,6 +1543,8 @@ struct MovableOnlyInstanceThreeWayCompare {
// Note: when the values in this test change, it is expected to have an impact // Note: when the values in this test change, it is expected to have an impact
// on performance. // on performance.
TEST(Btree, MovesComparisonsCopiesSwapsTrackingThreeWayCompare) { TEST(Btree, MovesComparisonsCopiesSwapsTrackingThreeWayCompare) {
if (kAsan) GTEST_SKIP() << "We do extra operations in ASan mode.";
InstanceTracker tracker; InstanceTracker tracker;
// Note: this is minimum number of values per node. // Note: this is minimum number of values per node.
SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/4, SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/4,
...@@ -3218,19 +3229,24 @@ TEST(Btree, InvalidIteratorUse) { ...@@ -3218,19 +3229,24 @@ TEST(Btree, InvalidIteratorUse) {
if (!BtreeNodePeer::UsesGenerations<absl::btree_set<int>>()) if (!BtreeNodePeer::UsesGenerations<absl::btree_set<int>>())
GTEST_SKIP() << "Generation validation for iterators is disabled."; GTEST_SKIP() << "Generation validation for iterators is disabled.";
// Invalid memory use can trigger heap-use-after-free in ASan or invalidated
// iterator assertions.
constexpr const char *kInvalidMemoryDeathMessage =
"heap-use-after-free|invalidated iterator";
{ {
absl::btree_set<int> set; absl::btree_set<int> set;
for (int i = 0; i < 10; ++i) set.insert(i); for (int i = 0; i < 10; ++i) set.insert(i);
auto it = set.begin(); auto it = set.begin();
set.erase(it++); set.erase(it++);
EXPECT_DEATH(set.erase(it++), "invalidated iterator"); EXPECT_DEATH(set.erase(it++), kInvalidMemoryDeathMessage);
} }
{ {
absl::btree_set<int> set; absl::btree_set<int> set;
for (int i = 0; i < 10; ++i) set.insert(i); for (int i = 0; i < 10; ++i) set.insert(i);
auto it = set.insert(20).first; auto it = set.insert(20).first;
set.insert(30); set.insert(30);
EXPECT_DEATH(*it, "invalidated iterator"); EXPECT_DEATH(*it, kInvalidMemoryDeathMessage);
} }
{ {
absl::btree_set<int> set; absl::btree_set<int> set;
...@@ -3238,15 +3254,15 @@ TEST(Btree, InvalidIteratorUse) { ...@@ -3238,15 +3254,15 @@ TEST(Btree, InvalidIteratorUse) {
auto it = set.find(5000); auto it = set.find(5000);
ASSERT_NE(it, set.end()); ASSERT_NE(it, set.end());
set.erase(1); set.erase(1);
EXPECT_DEATH(*it, "invalidated iterator"); EXPECT_DEATH(*it, kInvalidMemoryDeathMessage);
} }
{ {
absl::btree_set<int> set; absl::btree_set<int> set;
for (int i = 0; i < 10; ++i) set.insert(i); for (int i = 0; i < 10; ++i) set.insert(i);
auto it = set.insert(20).first; auto it = set.insert(20).first;
set.insert(30); set.insert(30);
EXPECT_DEATH(void(it == set.begin()), "invalidated iterator"); EXPECT_DEATH(void(it == set.begin()), kInvalidMemoryDeathMessage);
EXPECT_DEATH(void(set.begin() == it), "invalidated iterator"); EXPECT_DEATH(void(set.begin() == it), kInvalidMemoryDeathMessage);
} }
} }
#endif #endif
...@@ -3537,6 +3553,22 @@ TEST(Btree, InvalidIteratorComparison) { ...@@ -3537,6 +3553,22 @@ TEST(Btree, InvalidIteratorComparison) {
EXPECT_DEATH(void(iter2 == iter1), kDifferentContainerDeathMessage); EXPECT_DEATH(void(iter2 == iter1), kDifferentContainerDeathMessage);
} }
// Verifies that holding a pointer to a btree element across an insertion is
// detected as a heap-use-after-free: under ASan, insertion replaces the node
// the element lives on, so stale pointers refer to freed memory.
TEST(Btree, InvalidPointerUse) {
  if (!kAsan)
    GTEST_SKIP() << "We only detect invalid pointer use in ASan mode.";

  absl::btree_set<int> set;
  set.insert(0);
  const int *ptr = &*set.begin();
  set.insert(1);
  // The insertion above replaced the node, so `ptr` now points to freed
  // memory.
  EXPECT_DEATH(std::cout << *ptr, "heap-use-after-free");
  // Fill the node to one element below capacity, then take a pointer and
  // trigger one more insertion to force another node replacement.
  // Keep the bound signed: comparing `int i` against a `size_t` would raise
  // -Wsign-compare (an error under Abseil's -Werror builds).
  const int slots_per_node =
      static_cast<int>(BtreeNodePeer::GetNumSlotsPerNode<decltype(set)>());
  for (int i = 2; i < slots_per_node - 1; ++i) set.insert(i);
  ptr = &*set.begin();
  set.insert(slots_per_node);
  EXPECT_DEATH(std::cout << *ptr, "heap-use-after-free");
}
} // namespace } // namespace
} // namespace container_internal } // namespace container_internal
ABSL_NAMESPACE_END ABSL_NAMESPACE_END
......
...@@ -1095,9 +1095,9 @@ class btree_iterator_generation_info_enabled { ...@@ -1095,9 +1095,9 @@ class btree_iterator_generation_info_enabled {
// No-op counterpart of btree_iterator_generation_info_enabled, used when
// generation validation of iterators is compiled out. Every operation is a
// static no-op so calls optimize away and the class adds no per-iterator
// state.
class btree_iterator_generation_info_disabled {
 public:
  explicit btree_iterator_generation_info_disabled(uint32_t) {}
  static void update_generation(const void *) {}
  static uint32_t generation() { return 0; }
  static void assert_valid_generation(const void *) {}
};
#ifdef ABSL_BTREE_ENABLE_GENERATIONS #ifdef ABSL_BTREE_ENABLE_GENERATIONS
...@@ -2831,28 +2831,58 @@ inline auto btree<P>::internal_emplace(iterator iter, Args &&...args) ...@@ -2831,28 +2831,58 @@ inline auto btree<P>::internal_emplace(iterator iter, Args &&...args)
} }
const field_type max_count = iter.node_->max_count(); const field_type max_count = iter.node_->max_count();
allocator_type *alloc = mutable_allocator(); allocator_type *alloc = mutable_allocator();
const auto transfer_and_delete = [&](node_type *old_node,
node_type *new_node) {
new_node->transfer_n(old_node->count(), new_node->start(),
old_node->start(), old_node, alloc);
new_node->set_finish(old_node->finish());
old_node->set_finish(old_node->start());
new_node->set_generation(old_node->generation());
node_type::clear_and_delete(old_node, alloc);
};
const auto replace_leaf_root_node = [&](field_type new_node_size) {
assert(iter.node_ == root());
node_type *old_root = iter.node_;
node_type *new_root = iter.node_ = new_leaf_root_node(new_node_size);
transfer_and_delete(old_root, new_root);
mutable_root() = mutable_rightmost() = new_root;
};
bool replaced_node = false;
if (iter.node_->count() == max_count) { if (iter.node_->count() == max_count) {
// Make room in the leaf for the new item. // Make room in the leaf for the new item.
if (max_count < kNodeSlots) { if (max_count < kNodeSlots) {
// Insertion into the root where the root is smaller than the full node // Insertion into the root where the root is smaller than the full node
// size. Simply grow the size of the root node. // size. Simply grow the size of the root node.
assert(iter.node_ == root()); replace_leaf_root_node(static_cast<field_type>(
iter.node_ = new_leaf_root_node(static_cast<field_type>(
(std::min)(static_cast<int>(kNodeSlots), 2 * max_count))); (std::min)(static_cast<int>(kNodeSlots), 2 * max_count)));
// Transfer the values from the old root to the new root. replaced_node = true;
node_type *old_root = root();
node_type *new_root = iter.node_;
new_root->transfer_n(old_root->count(), new_root->start(),
old_root->start(), old_root, alloc);
new_root->set_finish(old_root->finish());
old_root->set_finish(old_root->start());
new_root->set_generation(old_root->generation());
node_type::clear_and_delete(old_root, alloc);
mutable_root() = mutable_rightmost() = new_root;
} else { } else {
rebalance_or_split(&iter); rebalance_or_split(&iter);
} }
} }
(void)replaced_node;
#ifdef ABSL_HAVE_ADDRESS_SANITIZER
if (!replaced_node) {
assert(iter.node_->is_leaf());
if (iter.node_->is_root()) {
replace_leaf_root_node(max_count);
} else {
node_type *old_node = iter.node_;
const bool was_rightmost = rightmost() == old_node;
const bool was_leftmost = leftmost() == old_node;
node_type *parent = old_node->parent();
const field_type position = old_node->position();
node_type *new_node = iter.node_ = new_leaf_node(position, parent);
parent->set_child_noupdate_position(position, new_node);
transfer_and_delete(old_node, new_node);
if (was_rightmost) mutable_rightmost() = new_node;
// The leftmost node is stored as the parent of the root node.
if (was_leftmost) root()->set_parent(new_node);
}
}
#endif
iter.node_->emplace_value(static_cast<field_type>(iter.position_), alloc, iter.node_->emplace_value(static_cast<field_type>(iter.position_), alloc,
std::forward<Args>(args)...); std::forward<Args>(args)...);
assert( assert(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment