Diffstat (limited to 'absl/container/internal')
-rw-r--r--  absl/container/internal/btree_container.h           | 65
-rw-r--r--  absl/container/internal/hashtablez_sampler.cc       |  9
-rw-r--r--  absl/container/internal/hashtablez_sampler.h        | 13
-rw-r--r--  absl/container/internal/hashtablez_sampler_test.cc  |  2
-rw-r--r--  absl/container/internal/raw_hash_set_test.cc        | 51
5 files changed, 82 insertions(+), 58 deletions(-)
diff --git a/absl/container/internal/btree_container.h b/absl/container/internal/btree_container.h
index 04795c2e3f0a..3e6ff4b892f1 100644
--- a/absl/container/internal/btree_container.h
+++ b/absl/container/internal/btree_container.h
@@ -372,7 +372,7 @@ class btree_map_container : public btree_set_container<Tree> {
   using super_type = btree_set_container<Tree>;
   using params_type = typename Tree::params_type;
 
- protected:
+ private:
   template <class K>
   using key_arg = typename super_type::template key_arg<K>;
 
@@ -390,6 +390,69 @@ class btree_map_container : public btree_set_container<Tree> {
   btree_map_container() {}
 
   // Insertion routines.
+  // Note: the nullptr template arguments and extra `const M&` overloads allow
+  // for supporting bitfield arguments.
+  // Note: when we call `std::forward<M>(obj)` twice, it's safe because
+  // insert_unique/insert_hint_unique are guaranteed to not consume `obj` when
+  // `ret.second` is false.
+  template <class M>
+  std::pair<iterator, bool> insert_or_assign(const key_type &k, const M &obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_unique(k, k, obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret;
+  }
+  template <class M, key_type * = nullptr>
+  std::pair<iterator, bool> insert_or_assign(key_type &&k, const M &obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_unique(k, std::move(k), obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret;
+  }
+  template <class M, M * = nullptr>
+  std::pair<iterator, bool> insert_or_assign(const key_type &k, M &&obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_unique(k, k, std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret;
+  }
+  template <class M, key_type * = nullptr, M * = nullptr>
+  std::pair<iterator, bool> insert_or_assign(key_type &&k, M &&obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_unique(k, std::move(k), std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret;
+  }
+  template <class M>
+  iterator insert_or_assign(const_iterator position, const key_type &k,
+                            const M &obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_hint_unique(iterator(position), k, k, obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret.first;
+  }
+  template <class M, key_type * = nullptr>
+  iterator insert_or_assign(const_iterator position, key_type &&k,
+                            const M &obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
+        iterator(position), k, std::move(k), obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret.first;
+  }
+  template <class M, M * = nullptr>
+  iterator insert_or_assign(const_iterator position, const key_type &k,
+                            M &&obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
+        iterator(position), k, k, std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret.first;
+  }
+  template <class M, key_type * = nullptr, M * = nullptr>
+  iterator insert_or_assign(const_iterator position, key_type &&k, M &&obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
+        iterator(position), k, std::move(k), std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret.first;
+  }
   template <typename... Args>
   std::pair<iterator, bool> try_emplace(const key_type &k, Args &&... args) {
     return this->tree_.insert_unique(
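
The overloads above mirror std::map::insert_or_assign: insert when the key is
absent, otherwise assign the new value to the existing mapped value; the hinted
variants return an iterator instead of a pair. A minimal usage sketch against
absl::btree_map (the keys and values here are illustrative only):

    #include <string>
    #include "absl/container/btree_map.h"

    void InsertOrAssignExample() {
      absl::btree_map<std::string, int> m;
      m.insert_or_assign("a", 1);             // key absent: inserts {"a", 1}
      m.insert_or_assign("a", 2);             // key present: assigns 2, .second == false
      m.insert_or_assign(m.begin(), "b", 3);  // hinted overload, returns an iterator
    }
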
diff --git a/absl/container/internal/hashtablez_sampler.cc b/absl/container/internal/hashtablez_sampler.cc
index e15f44443f59..564472517844 100644
--- a/absl/container/internal/hashtablez_sampler.cc
+++ b/absl/container/internal/hashtablez_sampler.cc
@@ -39,17 +39,16 @@ ABSL_CONST_INIT std::atomic<bool> g_hashtablez_enabled{
 ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_sample_parameter{1 << 10};
 ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_max_samples{1 << 20};
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 ABSL_PER_THREAD_TLS_KEYWORD absl::base_internal::ExponentialBiased
     g_exponential_biased_generator;
 #endif
 
 }  // namespace
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample = 0;
-#endif  // ABSL_PER_THREAD_TLS == 1
-
+#endif  // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 
 HashtablezSampler& HashtablezSampler::Global() {
   static auto* sampler = new HashtablezSampler();
@@ -192,7 +191,7 @@ HashtablezInfo* SampleSlow(int64_t* next_sample) {
     return HashtablezSampler::Global().Register();
   }
 
-#if ABSL_PER_THREAD_TLS == 0
+#if !defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
   *next_sample = std::numeric_limits<int64_t>::max();
   return nullptr;
 #else
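
Both the fast path and SampleSlow() now key off ABSL_INTERNAL_HASHTABLEZ_SAMPLE
rather than testing ABSL_PER_THREAD_TLS directly, so the thread-local generator
and counter are only emitted when sampling is actually compiled in. For readers
unfamiliar with the underlying technique, here is a minimal sketch of the
thread-local countdown sampling pattern, with hypothetical names and a
standard-library generator standing in for absl::base_internal::ExponentialBiased:

    #include <cstdint>
    #include <random>

    constexpr int64_t kMeanSamplePeriod = 1 << 10;  // assumed mean sampling interval

    thread_local int64_t next_sample = 0;  // countdown until the next sampled event

    // Slow path: draw a geometrically distributed skip so that, on average,
    // one in kMeanSamplePeriod events is sampled.
    bool ShouldSampleSlow() {
      thread_local std::mt19937_64 rng{std::random_device{}()};
      std::geometric_distribution<int64_t> skip(1.0 / kMeanSamplePeriod);
      next_sample = skip(rng) + 1;
      return true;  // the real sampler would also register per-table metadata here
    }

    // Hot path: a single thread-local decrement and a predictable branch.
    inline bool ShouldSample() {
      if (--next_sample > 0) return false;
      return ShouldSampleSlow();
    }
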
diff --git a/absl/container/internal/hashtablez_sampler.h b/absl/container/internal/hashtablez_sampler.h
index c4f9629fcb47..34d5e5723c0f 100644
--- a/absl/container/internal/hashtablez_sampler.h
+++ b/absl/container/internal/hashtablez_sampler.h
@@ -180,14 +180,23 @@ class HashtablezInfoHandle {
   HashtablezInfo* info_;
 };
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
+#error ABSL_INTERNAL_HASHTABLEZ_SAMPLE cannot be directly set
+#endif  // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
+
+#if (ABSL_PER_THREAD_TLS == 1) && !defined(ABSL_BUILD_DLL) && \
+    !defined(ABSL_CONSUME_DLL)
+#define ABSL_INTERNAL_HASHTABLEZ_SAMPLE
+#endif
+
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 extern ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample;
 #endif  // ABSL_PER_THREAD_TLS
 
 // Returns an RAII sampling handle that manages registration and
 // unregistration with the global sampler.
 inline HashtablezInfoHandle Sample() {
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
   if (ABSL_PREDICT_TRUE(--global_next_sample > 0)) {
     return HashtablezInfoHandle(nullptr);
   }
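
With this change the header derives ABSL_INTERNAL_HASHTABLEZ_SAMPLE from
ABSL_PER_THREAD_TLS plus the DLL build macros, instead of having every caller
test ABSL_PER_THREAD_TLS itself; the .cc and test files in this diff switch to
the derived macro accordingly. The intended call-site pattern is unchanged: a
container asks Sample() once at construction and keeps the returned handle,
which is inert for instances that were not selected. A minimal sketch (the
wrapper class and member name are hypothetical, not Abseil's raw_hash_set):

    #include "absl/container/internal/hashtablez_sampler.h"

    class SampledContainer {
     public:
      SampledContainer() : infoz_(absl::container_internal::Sample()) {}
      // Insert/erase paths would report events through infoz_; for unsampled
      // instances the handle holds no HashtablezInfo and those calls are no-ops.
     private:
      absl::container_internal::HashtablezInfoHandle infoz_;
    };
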
diff --git a/absl/container/internal/hashtablez_sampler_test.cc b/absl/container/internal/hashtablez_sampler_test.cc
index 102b23757cf6..36f5ccdd02a7 100644
--- a/absl/container/internal/hashtablez_sampler_test.cc
+++ b/absl/container/internal/hashtablez_sampler_test.cc
@@ -169,7 +169,7 @@ TEST(HashtablezInfoTest, RecordRehash) {
   EXPECT_EQ(info.num_erases.load(), 0);
 }
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 TEST(HashtablezSamplerTest, SmallSampleParameter) {
   SetHashtablezEnabled(true);
   SetHashtablezSampleParameter(100);
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 38e5e0e8d3bd..a96ae68ac76c 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -418,53 +418,6 @@ TEST(Table, Empty) {
   EXPECT_TRUE(t.empty());
 }
 
-#ifdef __GNUC__
-template <class T>
-ABSL_ATTRIBUTE_ALWAYS_INLINE inline void DoNotOptimize(const T& v) {
-  asm volatile("" : : "r,m"(v) : "memory");
-}
-#endif
-
-TEST(Table, Prefetch) {
-  IntTable t;
-  t.emplace(1);
-  // Works for both present and absent keys.
-  t.prefetch(1);
-  t.prefetch(2);
-
-  // Do not run in debug mode, when prefetch is not implemented, or when
-  // sanitizers are enabled, or on WebAssembly.
-#if defined(NDEBUG) && defined(__GNUC__) && defined(__x86_64__) &&          \
-    !defined(ADDRESS_SANITIZER) && !defined(MEMORY_SANITIZER) &&            \
-    !defined(THREAD_SANITIZER) && !defined(UNDEFINED_BEHAVIOR_SANITIZER) && \
-    !defined(__EMSCRIPTEN__)
-  const auto now = [] { return absl::base_internal::CycleClock::Now(); };
-
-  // Make size enough to not fit in L2 cache (16.7 Mb)
-  static constexpr int size = 1 << 22;
-  for (int i = 0; i < size; ++i) t.insert(i);
-
-  int64_t no_prefetch = 0, prefetch = 0;
-  for (int iter = 0; iter < 10; ++iter) {
-    int64_t time = now();
-    for (int i = 0; i < size; ++i) {
-      DoNotOptimize(t.find(i));
-    }
-    no_prefetch += now() - time;
-
-    time = now();
-    for (int i = 0; i < size; ++i) {
-      t.prefetch(i + 20);
-      DoNotOptimize(t.find(i));
-    }
-    prefetch += now() - time;
-  }
-
-  // no_prefetch is at least 30% slower.
-  EXPECT_GE(1.0 * no_prefetch / prefetch, 1.3);
-#endif
-}
-
 TEST(Table, LookupEmpty) {
   IntTable t;
   auto it = t.find(0);
@@ -1842,7 +1795,7 @@ TEST(TableDeathTest, EraseOfEndAsserts) {
   EXPECT_DEATH_IF_SUPPORTED(t.erase(t.end()), kDeathMsg);
 }
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 TEST(RawHashSamplerTest, Sample) {
   // Enable the feature even if the prod default is off.
   SetHashtablezEnabled(true);
@@ -1863,7 +1816,7 @@ TEST(RawHashSamplerTest, Sample) {
   EXPECT_NEAR((end_size - start_size) / static_cast<double>(tables.size()),
               0.01, 0.005);
 }
-#endif
+#endif  // ABSL_INTERNAL_HASHTABLEZ_SAMPLE
 
 TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) {
   // Enable the feature even if the prod default is off.