Diffstat (limited to 'absl/container/internal')
-rw-r--r--  absl/container/internal/raw_hash_set.h       | 13
-rw-r--r--  absl/container/internal/raw_hash_set_test.cc | 41
2 files changed, 52 insertions, 2 deletions
diff --git a/absl/container/internal/raw_hash_set.h b/absl/container/internal/raw_hash_set.h
index 85e33344c7d5..9c926812a07a 100644
--- a/absl/container/internal/raw_hash_set.h
+++ b/absl/container/internal/raw_hash_set.h
@@ -1437,7 +1437,18 @@ class raw_hash_set {
 
   void initialize_slots() {
     assert(capacity_);
-    if (slots_ == nullptr) {
+    // Folks with custom allocators often make unwarranted assumptions about the
+    // behavior of their classes vis-a-vis trivial destructibility and what
+    // calls they will or won't make.  Avoid sampling for people with custom
+    // allocators to get us out of this mess.  This is not a hard guarantee but a
+    // workaround while we plan the exact guarantee we want to provide.
+    //
+    // People are often sloppy with the exact type of their allocator (sometimes
+    // it has an extra const or is missing the pair, but rebinds made it work
+    // anyway).  To avoid the ambiguity, we work off SlotAlloc, which we have
+    // bound more carefully.
+    if (std::is_same<SlotAlloc, std::allocator<slot_type>>::value &&
+        slots_ == nullptr) {
       infoz_ = Sample();
     }
 
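The new condition keys off SlotAlloc, the allocator type the table has already rebound to its slot type, rather than the allocator the user spelled out. As a rough illustration of why rebinding removes the ambiguity, here is a standalone sketch (not part of this commit; Key, Mapped, slot_type, and UserAlloc are made-up placeholders, and real Swiss-table slot types differ):

    #include <memory>
    #include <type_traits>
    #include <utility>

    // Hypothetical stand-ins for the types involved; only the shape matters.
    using Key = int;
    using Mapped = int;
    using slot_type = std::pair<const Key, Mapped>;

    // A user-supplied allocator that is "sloppy" about its value_type: it
    // names the non-const pair instead of the slot type actually allocated.
    using UserAlloc = std::allocator<std::pair<Key, Mapped>>;

    // Rebinding (which is how the table obtains SlotAlloc) normalizes the
    // type, so the std::allocator check above still recognizes it and such
    // tables remain eligible for sampling.
    using SlotAlloc =
        std::allocator_traits<UserAlloc>::rebind_alloc<slot_type>;

    static_assert(std::is_same<SlotAlloc, std::allocator<slot_type>>::value,
                  "a rebound std::allocator is still std::allocator<slot_type>");

A genuinely custom allocator, by contrast, does not rebind to std::allocator<slot_type>, so the check above skips Sample() for it.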
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 7d96ed90be9c..871d85f139a1 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -343,7 +343,25 @@ struct IntTable
     : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
                    std::equal_to<int64_t>, std::allocator<int64_t>> {
   using Base = typename IntTable::raw_hash_set;
-  IntTable() {}
+  using Base::Base;
+};
+
+template <typename T>
+struct CustomAlloc : std::allocator<T> {
+  CustomAlloc() {}
+
+  template <typename U>
+  CustomAlloc(const CustomAlloc<U>& other) {}
+
+  template<class U> struct rebind {
+    using other = CustomAlloc<U>;
+  };
+};
+
+struct CustomAllocIntTable
+    : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
+                   std::equal_to<int64_t>, CustomAlloc<int64_t>> {
+  using Base = typename CustomAllocIntTable::raw_hash_set;
   using Base::Base;
 };
 
@@ -1869,6 +1887,27 @@ TEST(RawHashSamplerTest, Sample) {
               0.01, 0.005);
 }
 
+TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) {
+  // Enable the feature even if the prod default is off.
+  SetHashtablezEnabled(true);
+  SetHashtablezSampleParameter(100);
+
+  auto& sampler = HashtablezSampler::Global();
+  size_t start_size = 0;
+  start_size += sampler.Iterate([&](const HashtablezInfo&) { ++start_size; });
+
+  std::vector<CustomAllocIntTable> tables;
+  for (int i = 0; i < 1000000; ++i) {
+    tables.emplace_back();
+    tables.back().insert(1);
+  }
+  size_t end_size = 0;
+  end_size += sampler.Iterate([&](const HashtablezInfo&) { ++end_size; });
+
+  EXPECT_NEAR((end_size - start_size) / static_cast<double>(tables.size()),
+              0.00, 0.001);
+}
+
 #ifdef ADDRESS_SANITIZER
 TEST(Sanitizer, PoisoningUnused) {
   IntTable t;
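
The new test mirrors the existing RawHashSamplerTest.Sample check but expects a near-zero sampling rate for the custom-allocator table. The before/after counting pattern shared by the two tests could be expressed as a small helper; the sketch below is illustrative only (SampledFraction is an invented name) and assumes it sits in raw_hash_set_test.cc, where HashtablezSampler, HashtablezInfo, and the table types are already in scope:

    // Illustrative helper, not part of this commit.  Table would be IntTable
    // or CustomAllocIntTable from the test file above.
    template <typename Table>
    double SampledFraction(int num_tables) {
      auto& sampler = HashtablezSampler::Global();

      // Snapshot the sampler before creating any tables.
      size_t start_size = 0;
      start_size += sampler.Iterate([&](const HashtablezInfo&) { ++start_size; });

      // Create tables and insert one element each so they allocate slots.
      std::vector<Table> tables;
      for (int i = 0; i < num_tables; ++i) {
        tables.emplace_back();
        tables.back().insert(1);
      }

      // Count again; the difference approximates how many of our tables
      // were sampled.
      size_t end_size = 0;
      end_size += sampler.Iterate([&](const HashtablezInfo&) { ++end_size; });

      return (end_size - start_size) / static_cast<double>(tables.size());
    }

With SetHashtablezSampleParameter(100), this fraction should land near 0.01 for IntTable (as the existing Sample test asserts) and near 0.00 for CustomAllocIntTable after this change.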