Commit 2e9532cc by Abseil Team Committed by Gennadiy Rozental

Export of internal Abseil changes

--
5ed5dc9e17c66c298ee31cefc941a46348d8ad34 by Abseil Team <absl-team@google.com>:

Fix typo.

PiperOrigin-RevId: 362040582

--
ac704b53a49becc42f77e4529d3952f8e7d18ce4 by Abseil Team <absl-team@google.com>:

Fix a typo in a comment.

PiperOrigin-RevId: 361576641

--
d20ccb27b7e9b53481e9192c1aae5202c06bfcb1 by Derek Mauro <dmauro@google.com>:

Remove the inline keyword from functions that aren't defined
in the header.

This may fix #910.

PiperOrigin-RevId: 361551300

--
aed9ae1dffa7b228dcb6ffbeb2fe06a13970c72b by Laramie Leavitt <lar@google.com>:

Propagate nice/strict/naggy state on absl::MockingBitGen.

Allowing NiceMocks reduces the log spam for un-mocked calls, and it enables nicer setup with ON_CALL, so it is desirable to support it in absl::MockingBitGen.  Internally, gmock tracks object "strictness" levels using an internal API; in order to achieve the same results we detect when the MockingBitGen is wrapped in a Nice/Naggy/Strict and wrap the internal implementation MockFunction in the same type.

This is achieved by providing overloads to the Call() function, and passing the mock object type down into its own RegisterMock call, where a compile-time check verifies the state and creates the appropriate mock function.

PiperOrigin-RevId: 361233484

--
96186023fabd13d01d32d60d9c7ac4ead1aeb989 by Abseil Team <absl-team@google.com>:

Ensure that trivial types are passed by value rather than reference

PiperOrigin-RevId: 361217450

--
e1135944835d27f77e8119b8166d8fb6aa25f906 by Evan Brown <ezb@google.com>:

Internal change.

PiperOrigin-RevId: 361215882

--
583fe6c94c1c2ef757ef6e78292a15fbe4030e35 by Evan Brown <ezb@google.com>:

Increase the minimum number of slots per node from 3 to 4. We also rename kNodeValues (and related names) to kNodeSlots to make it clear that they are about the number of slots per node rather than the number of values per node - kMinNodeValues keeps the same name because it's actually about the number of values rather than the number of slots.

Motivation: I think the expected number of values per node, assuming random insertion order, is the average of the maximum and minimum numbers of values per node (kNodeSlots and kMinNodeValues). For large and/or even kNodeSlots, this is ~75% of kNodeSlots, but for kNodeSlots=3, this is ~67% of kNodeSlots. kMinNodeValues (which corresponds to worst-case occupancy) is ~33% of kNodeSlots, when kNodeSlots=3, compared to 50% for even kNodeSlots. This results in higher memory overhead per value, and since this case (kNodeSlots=3) is used when values are large, it seems worth fixing.
PiperOrigin-RevId: 361171495
GitOrigin-RevId: 5ed5dc9e17c66c298ee31cefc941a46348d8ad34
Change-Id: I8e33b5df1f987a77112093821085c410185ab51a
parent ab21820d
...@@ -317,7 +317,7 @@ namespace base_internal { ...@@ -317,7 +317,7 @@ namespace base_internal {
// Takes a reference to a guarded data member, and returns an unguarded // Takes a reference to a guarded data member, and returns an unguarded
// reference. // reference.
// Do not used this function directly, use ABSL_TS_UNCHECKED_READ instead. // Do not use this function directly, use ABSL_TS_UNCHECKED_READ instead.
template <typename T> template <typename T>
inline const T& ts_unchecked_read(const T& v) ABSL_NO_THREAD_SAFETY_ANALYSIS { inline const T& ts_unchecked_read(const T& v) ABSL_NO_THREAD_SAFETY_ANALYSIS {
return v; return v;
......
...@@ -26,6 +26,7 @@ ...@@ -26,6 +26,7 @@
#include <unordered_set> #include <unordered_set>
#include <vector> #include <vector>
#include "benchmark/benchmark.h"
#include "absl/base/internal/raw_logging.h" #include "absl/base/internal/raw_logging.h"
#include "absl/container/btree_map.h" #include "absl/container/btree_map.h"
#include "absl/container/btree_set.h" #include "absl/container/btree_set.h"
...@@ -39,7 +40,6 @@ ...@@ -39,7 +40,6 @@
#include "absl/strings/cord.h" #include "absl/strings/cord.h"
#include "absl/strings/str_format.h" #include "absl/strings/str_format.h"
#include "absl/time/time.h" #include "absl/time/time.h"
#include "benchmark/benchmark.h"
namespace absl { namespace absl {
ABSL_NAMESPACE_BEGIN ABSL_NAMESPACE_BEGIN
......
...@@ -1193,13 +1193,13 @@ class BtreeNodePeer { ...@@ -1193,13 +1193,13 @@ class BtreeNodePeer {
return btree_node< return btree_node<
set_params<ValueType, std::less<ValueType>, std::allocator<ValueType>, set_params<ValueType, std::less<ValueType>, std::allocator<ValueType>,
/*TargetNodeSize=*/256, // This parameter isn't used here. /*TargetNodeSize=*/256, // This parameter isn't used here.
/*Multi=*/false>>::SizeWithNValues(target_values_per_node); /*Multi=*/false>>::SizeWithNSlots(target_values_per_node);
} }
// Yields the number of values in a (non-root) leaf node for this btree. // Yields the number of slots in a (non-root) leaf node for this btree.
template <typename Btree> template <typename Btree>
constexpr static size_t GetNumValuesPerNode() { constexpr static size_t GetNumSlotsPerNode() {
return btree_node<typename Btree::params_type>::kNodeValues; return btree_node<typename Btree::params_type>::kNodeSlots;
} }
template <typename Btree> template <typename Btree>
...@@ -1458,7 +1458,7 @@ void ExpectOperationCounts(const int expected_moves, ...@@ -1458,7 +1458,7 @@ void ExpectOperationCounts(const int expected_moves,
TEST(Btree, MovesComparisonsCopiesSwapsTracking) { TEST(Btree, MovesComparisonsCopiesSwapsTracking) {
InstanceTracker tracker; InstanceTracker tracker;
// Note: this is minimum number of values per node. // Note: this is minimum number of values per node.
SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/3> set3; SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/4> set4;
// Note: this is the default number of values per node for a set of int32s // Note: this is the default number of values per node for a set of int32s
// (with 64-bit pointers). // (with 64-bit pointers).
SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/61> set61; SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/61> set61;
...@@ -1469,28 +1469,28 @@ TEST(Btree, MovesComparisonsCopiesSwapsTracking) { ...@@ -1469,28 +1469,28 @@ TEST(Btree, MovesComparisonsCopiesSwapsTracking) {
std::vector<int> values = std::vector<int> values =
GenerateValuesWithSeed<int>(10000, 1 << 22, /*seed=*/23); GenerateValuesWithSeed<int>(10000, 1 << 22, /*seed=*/23);
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<decltype(set3)>(), 3); EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set4)>(), 4);
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<decltype(set61)>(), 61); EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>(), 61);
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<decltype(set100)>(), 100); EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set100)>(), 100);
if (sizeof(void *) == 8) { if (sizeof(void *) == 8) {
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<absl::btree_set<int32_t>>(), EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<absl::btree_set<int32_t>>(),
BtreeNodePeer::GetNumValuesPerNode<decltype(set61)>()); BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>());
} }
// Test key insertion/deletion in random order. // Test key insertion/deletion in random order.
ExpectOperationCounts(45281, 132551, values, &tracker, &set3); ExpectOperationCounts(56540, 134212, values, &tracker, &set4);
ExpectOperationCounts(386718, 129807, values, &tracker, &set61); ExpectOperationCounts(386718, 129807, values, &tracker, &set61);
ExpectOperationCounts(586761, 130310, values, &tracker, &set100); ExpectOperationCounts(586761, 130310, values, &tracker, &set100);
// Test key insertion/deletion in sorted order. // Test key insertion/deletion in sorted order.
std::sort(values.begin(), values.end()); std::sort(values.begin(), values.end());
ExpectOperationCounts(26638, 92134, values, &tracker, &set3); ExpectOperationCounts(24972, 85563, values, &tracker, &set4);
ExpectOperationCounts(20208, 87757, values, &tracker, &set61); ExpectOperationCounts(20208, 87757, values, &tracker, &set61);
ExpectOperationCounts(20124, 96583, values, &tracker, &set100); ExpectOperationCounts(20124, 96583, values, &tracker, &set100);
// Test key insertion/deletion in reverse sorted order. // Test key insertion/deletion in reverse sorted order.
std::reverse(values.begin(), values.end()); std::reverse(values.begin(), values.end());
ExpectOperationCounts(49951, 119325, values, &tracker, &set3); ExpectOperationCounts(54949, 127531, values, &tracker, &set4);
ExpectOperationCounts(338813, 118266, values, &tracker, &set61); ExpectOperationCounts(338813, 118266, values, &tracker, &set61);
ExpectOperationCounts(534529, 125279, values, &tracker, &set100); ExpectOperationCounts(534529, 125279, values, &tracker, &set100);
} }
...@@ -1507,9 +1507,9 @@ struct MovableOnlyInstanceThreeWayCompare { ...@@ -1507,9 +1507,9 @@ struct MovableOnlyInstanceThreeWayCompare {
TEST(Btree, MovesComparisonsCopiesSwapsTrackingThreeWayCompare) { TEST(Btree, MovesComparisonsCopiesSwapsTrackingThreeWayCompare) {
InstanceTracker tracker; InstanceTracker tracker;
// Note: this is minimum number of values per node. // Note: this is minimum number of values per node.
SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/3, SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/4,
MovableOnlyInstanceThreeWayCompare> MovableOnlyInstanceThreeWayCompare>
set3; set4;
// Note: this is the default number of values per node for a set of int32s // Note: this is the default number of values per node for a set of int32s
// (with 64-bit pointers). // (with 64-bit pointers).
SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/61, SizedBtreeSet<MovableOnlyInstance, /*TargetValuesPerNode=*/61,
...@@ -1524,28 +1524,28 @@ TEST(Btree, MovesComparisonsCopiesSwapsTrackingThreeWayCompare) { ...@@ -1524,28 +1524,28 @@ TEST(Btree, MovesComparisonsCopiesSwapsTrackingThreeWayCompare) {
std::vector<int> values = std::vector<int> values =
GenerateValuesWithSeed<int>(10000, 1 << 22, /*seed=*/23); GenerateValuesWithSeed<int>(10000, 1 << 22, /*seed=*/23);
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<decltype(set3)>(), 3); EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set4)>(), 4);
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<decltype(set61)>(), 61); EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>(), 61);
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<decltype(set100)>(), 100); EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<decltype(set100)>(), 100);
if (sizeof(void *) == 8) { if (sizeof(void *) == 8) {
EXPECT_EQ(BtreeNodePeer::GetNumValuesPerNode<absl::btree_set<int32_t>>(), EXPECT_EQ(BtreeNodePeer::GetNumSlotsPerNode<absl::btree_set<int32_t>>(),
BtreeNodePeer::GetNumValuesPerNode<decltype(set61)>()); BtreeNodePeer::GetNumSlotsPerNode<decltype(set61)>());
} }
// Test key insertion/deletion in random order. // Test key insertion/deletion in random order.
ExpectOperationCounts(45281, 122560, values, &tracker, &set3); ExpectOperationCounts(56540, 124221, values, &tracker, &set4);
ExpectOperationCounts(386718, 119816, values, &tracker, &set61); ExpectOperationCounts(386718, 119816, values, &tracker, &set61);
ExpectOperationCounts(586761, 120319, values, &tracker, &set100); ExpectOperationCounts(586761, 120319, values, &tracker, &set100);
// Test key insertion/deletion in sorted order. // Test key insertion/deletion in sorted order.
std::sort(values.begin(), values.end()); std::sort(values.begin(), values.end());
ExpectOperationCounts(26638, 92134, values, &tracker, &set3); ExpectOperationCounts(24972, 85563, values, &tracker, &set4);
ExpectOperationCounts(20208, 87757, values, &tracker, &set61); ExpectOperationCounts(20208, 87757, values, &tracker, &set61);
ExpectOperationCounts(20124, 96583, values, &tracker, &set100); ExpectOperationCounts(20124, 96583, values, &tracker, &set100);
// Test key insertion/deletion in reverse sorted order. // Test key insertion/deletion in reverse sorted order.
std::reverse(values.begin(), values.end()); std::reverse(values.begin(), values.end());
ExpectOperationCounts(49951, 109326, values, &tracker, &set3); ExpectOperationCounts(54949, 117532, values, &tracker, &set4);
ExpectOperationCounts(338813, 108267, values, &tracker, &set61); ExpectOperationCounts(338813, 108267, values, &tracker, &set61);
ExpectOperationCounts(534529, 115280, values, &tracker, &set100); ExpectOperationCounts(534529, 115280, values, &tracker, &set100);
} }
......
...@@ -166,7 +166,7 @@ TryDecomposeValue(F&& f, Arg&& arg) { ...@@ -166,7 +166,7 @@ TryDecomposeValue(F&& f, Arg&& arg) {
} }
TEST(DecomposeValue, Decomposable) { TEST(DecomposeValue, Decomposable) {
auto f = [](const int& x, int&& y) { auto f = [](const int& x, int&& y) { // NOLINT
EXPECT_EQ(&x, &y); EXPECT_EQ(&x, &y);
EXPECT_EQ(42, x); EXPECT_EQ(42, x);
return 'A'; return 'A';
...@@ -200,7 +200,8 @@ TryDecomposePair(F&& f, Args&&... args) { ...@@ -200,7 +200,8 @@ TryDecomposePair(F&& f, Args&&... args) {
} }
TEST(DecomposePair, Decomposable) { TEST(DecomposePair, Decomposable) {
auto f = [](const int& x, std::piecewise_construct_t, std::tuple<int&&> k, auto f = [](const int& x, // NOLINT
std::piecewise_construct_t, std::tuple<int&&> k,
std::tuple<double>&& v) { std::tuple<double>&& v) {
EXPECT_EQ(&x, &std::get<0>(k)); EXPECT_EQ(&x, &std::get<0>(k));
EXPECT_EQ(42, x); EXPECT_EQ(42, x);
......
...@@ -120,10 +120,10 @@ class MockHelpers { ...@@ -120,10 +120,10 @@ class MockHelpers {
-> decltype(m.template RegisterMock< -> decltype(m.template RegisterMock<
typename KeySignature<KeyT>::result_type, typename KeySignature<KeyT>::result_type,
typename KeySignature<KeyT>::arg_tuple_type>( typename KeySignature<KeyT>::arg_tuple_type>(
std::declval<IdType>())) { m, std::declval<IdType>())) {
return m.template RegisterMock<typename KeySignature<KeyT>::result_type, return m.template RegisterMock<typename KeySignature<KeyT>::result_type,
typename KeySignature<KeyT>::arg_tuple_type>( typename KeySignature<KeyT>::arg_tuple_type>(
::absl::base_internal::FastTypeId<KeyT>()); m, ::absl::base_internal::FastTypeId<KeyT>());
} }
}; };
......
...@@ -19,7 +19,6 @@ ...@@ -19,7 +19,6 @@
#include <type_traits> #include <type_traits>
#include "gmock/gmock.h" #include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/random/internal/mock_helpers.h" #include "absl/random/internal/mock_helpers.h"
#include "absl/random/mocking_bit_gen.h" #include "absl/random/mocking_bit_gen.h"
...@@ -45,9 +44,12 @@ struct MockSingleOverload<DistrT, Ret(MockingBitGen&, Args...)> { ...@@ -45,9 +44,12 @@ struct MockSingleOverload<DistrT, Ret(MockingBitGen&, Args...)> {
"Overload signature must have return type matching the " "Overload signature must have return type matching the "
"distribution result_type."); "distribution result_type.");
using KeyT = Ret(DistrT, std::tuple<Args...>); using KeyT = Ret(DistrT, std::tuple<Args...>);
auto gmock_Call(absl::MockingBitGen& gen,
const ::testing::Matcher<Args>&... matchers) template <typename MockURBG>
auto gmock_Call(MockURBG& gen, const ::testing::Matcher<Args>&... matchers)
-> decltype(MockHelpers::MockFor<KeyT>(gen).gmock_Call(matchers...)) { -> decltype(MockHelpers::MockFor<KeyT>(gen).gmock_Call(matchers...)) {
static_assert(std::is_base_of<MockingBitGen, MockURBG>::value,
"Mocking requires an absl::MockingBitGen");
return MockHelpers::MockFor<KeyT>(gen).gmock_Call(matchers...); return MockHelpers::MockFor<KeyT>(gen).gmock_Call(matchers...);
} }
}; };
...@@ -58,11 +60,14 @@ struct MockSingleOverload<DistrT, Ret(Arg, MockingBitGen&, Args...)> { ...@@ -58,11 +60,14 @@ struct MockSingleOverload<DistrT, Ret(Arg, MockingBitGen&, Args...)> {
"Overload signature must have return type matching the " "Overload signature must have return type matching the "
"distribution result_type."); "distribution result_type.");
using KeyT = Ret(DistrT, std::tuple<Arg, Args...>); using KeyT = Ret(DistrT, std::tuple<Arg, Args...>);
auto gmock_Call(const ::testing::Matcher<Arg>& matcher,
absl::MockingBitGen& gen, template <typename MockURBG>
auto gmock_Call(const ::testing::Matcher<Arg>& matcher, MockURBG& gen,
const ::testing::Matcher<Args>&... matchers) const ::testing::Matcher<Args>&... matchers)
-> decltype(MockHelpers::MockFor<KeyT>(gen).gmock_Call(matcher, -> decltype(MockHelpers::MockFor<KeyT>(gen).gmock_Call(matcher,
matchers...)) { matchers...)) {
static_assert(std::is_base_of<MockingBitGen, MockURBG>::value,
"Mocking requires an absl::MockingBitGen");
return MockHelpers::MockFor<KeyT>(gen).gmock_Call(matcher, matchers...); return MockHelpers::MockFor<KeyT>(gen).gmock_Call(matcher, matchers...);
} }
}; };
......
...@@ -175,13 +175,26 @@ class MockingBitGen { ...@@ -175,13 +175,26 @@ class MockingBitGen {
// //
// The returned MockFunction<...> type can be used to setup additional // The returned MockFunction<...> type can be used to setup additional
// distribution parameters of the expectation. // distribution parameters of the expectation.
template <typename ResultT, typename ArgTupleT> template <typename ResultT, typename ArgTupleT, typename SelfT>
auto RegisterMock(base_internal::FastTypeIdType type) auto RegisterMock(SelfT&, base_internal::FastTypeIdType type)
-> decltype(GetMockFnType(std::declval<ResultT>(), -> decltype(GetMockFnType(std::declval<ResultT>(),
std::declval<ArgTupleT>()))& { std::declval<ArgTupleT>()))& {
using MockFnType = decltype(GetMockFnType(std::declval<ResultT>(), using MockFnType = decltype(GetMockFnType(std::declval<ResultT>(),
std::declval<ArgTupleT>())); std::declval<ArgTupleT>()));
using ImplT = FunctionHolderImpl<MockFnType, ResultT, ArgTupleT>;
using WrappedFnType = absl::conditional_t<
std::is_same<SelfT, ::testing::NiceMock<absl::MockingBitGen>>::value,
::testing::NiceMock<MockFnType>,
absl::conditional_t<
std::is_same<SelfT,
::testing::NaggyMock<absl::MockingBitGen>>::value,
::testing::NaggyMock<MockFnType>,
absl::conditional_t<
std::is_same<SelfT,
::testing::StrictMock<absl::MockingBitGen>>::value,
::testing::StrictMock<MockFnType>, MockFnType>>>;
using ImplT = FunctionHolderImpl<WrappedFnType, ResultT, ArgTupleT>;
auto& mock = mocks_[type]; auto& mock = mocks_[type];
if (!mock) { if (!mock) {
mock = absl::make_unique<ImplT>(); mock = absl::make_unique<ImplT>();
......
...@@ -26,6 +26,8 @@ ...@@ -26,6 +26,8 @@
#include "absl/random/random.h" #include "absl/random/random.h"
namespace { namespace {
using ::testing::_;
using ::testing::Ne; using ::testing::Ne;
using ::testing::Return; using ::testing::Return;
...@@ -344,4 +346,47 @@ TEST(MockingBitGen, InSequenceSucceedsInOrder) { ...@@ -344,4 +346,47 @@ TEST(MockingBitGen, InSequenceSucceedsInOrder) {
EXPECT_EQ(absl::Poisson<int>(gen, 2.0), 4); EXPECT_EQ(absl::Poisson<int>(gen, 2.0), 4);
} }
TEST(MockingBitGen, NiceMock) {
::testing::NiceMock<absl::MockingBitGen> gen;
ON_CALL(absl::MockUniform<int>(), Call(gen, _, _)).WillByDefault(Return(145));
ON_CALL(absl::MockPoisson<int>(), Call(gen, _)).WillByDefault(Return(3));
EXPECT_EQ(absl::Uniform(gen, 1, 1000), 145);
EXPECT_EQ(absl::Uniform(gen, 10, 1000), 145);
EXPECT_EQ(absl::Uniform(gen, 100, 1000), 145);
}
TEST(MockingBitGen, NaggyMock) {
// This is difficult to test, as only the output matters, so just verify
// that ON_CALL can be installed. Anything else requires log inspection.
::testing::NaggyMock<absl::MockingBitGen> gen;
ON_CALL(absl::MockUniform<int>(), Call(gen, _, _)).WillByDefault(Return(145));
ON_CALL(absl::MockPoisson<int>(), Call(gen, _)).WillByDefault(Return(3));
EXPECT_EQ(absl::Uniform(gen, 1, 1000), 145);
}
TEST(MockingBitGen, StrictMock_NotEnough) {
EXPECT_NONFATAL_FAILURE(
[]() {
::testing::StrictMock<absl::MockingBitGen> gen;
EXPECT_CALL(absl::MockUniform<int>(), Call(gen, _, _))
.WillOnce(Return(145));
}(),
"unsatisfied and active");
}
TEST(MockingBitGen, StrictMock_TooMany) {
::testing::StrictMock<absl::MockingBitGen> gen;
EXPECT_CALL(absl::MockUniform<int>(), Call(gen, _, _)).WillOnce(Return(145));
EXPECT_EQ(absl::Uniform(gen, 1, 1000), 145);
EXPECT_NONFATAL_FAILURE(
[&]() { EXPECT_EQ(absl::Uniform(gen, 10, 1000), 0); }(),
"over-saturated and active");
}
} // namespace } // namespace
...@@ -144,7 +144,7 @@ strings_internal::DereferenceFormatterImpl<Formatter> DereferenceFormatter( ...@@ -144,7 +144,7 @@ strings_internal::DereferenceFormatterImpl<Formatter> DereferenceFormatter(
std::forward<Formatter>(f)); std::forward<Formatter>(f));
} }
// Function overload of `DererefenceFormatter()` for using a default // Function overload of `DereferenceFormatter()` for using a default
// `AlphaNumFormatter()`. // `AlphaNumFormatter()`.
inline strings_internal::DereferenceFormatterImpl< inline strings_internal::DereferenceFormatterImpl<
strings_internal::AlphaNumFormatterImpl> strings_internal::AlphaNumFormatterImpl>
......
...@@ -559,7 +559,7 @@ static SynchLocksHeld *Synch_GetAllLocks() { ...@@ -559,7 +559,7 @@ static SynchLocksHeld *Synch_GetAllLocks() {
} }
// Post on "w"'s associated PerThreadSem. // Post on "w"'s associated PerThreadSem.
inline void Mutex::IncrementSynchSem(Mutex *mu, PerThreadSynch *w) { void Mutex::IncrementSynchSem(Mutex *mu, PerThreadSynch *w) {
if (mu) { if (mu) {
ABSL_TSAN_MUTEX_PRE_DIVERT(mu, 0); ABSL_TSAN_MUTEX_PRE_DIVERT(mu, 0);
} }
......
...@@ -457,11 +457,9 @@ class ABSL_LOCKABLE Mutex { ...@@ -457,11 +457,9 @@ class ABSL_LOCKABLE Mutex {
// Post()/Wait() versus associated PerThreadSem; in class for required // Post()/Wait() versus associated PerThreadSem; in class for required
// friendship with PerThreadSem. // friendship with PerThreadSem.
static inline void IncrementSynchSem(Mutex *mu, static void IncrementSynchSem(Mutex *mu, base_internal::PerThreadSynch *w);
base_internal::PerThreadSynch *w); static bool DecrementSynchSem(Mutex *mu, base_internal::PerThreadSynch *w,
static inline bool DecrementSynchSem( synchronization_internal::KernelTimeout t);
Mutex *mu, base_internal::PerThreadSynch *w,
synchronization_internal::KernelTimeout t);
// slow path acquire // slow path acquire
void LockSlowLoop(SynchWaitParams *waitp, int flags); void LockSlowLoop(SynchWaitParams *waitp, int flags);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment