Diffstat (limited to 'absl/container')
-rw-r--r--  absl/container/btree_map.h                          |  24
-rw-r--r--  absl/container/btree_test.cc                        |  59
-rw-r--r--  absl/container/internal/btree_container.h           |  65
-rw-r--r--  absl/container/internal/hashtablez_sampler.cc       |   9
-rw-r--r--  absl/container/internal/hashtablez_sampler.h        |  13
-rw-r--r--  absl/container/internal/hashtablez_sampler_test.cc  |   2
-rw-r--r--  absl/container/internal/raw_hash_set_test.cc        |  51
7 files changed, 165 insertions(+), 58 deletions(-)
diff --git a/absl/container/btree_map.h b/absl/container/btree_map.h
index cbfcb58c4129..d23f4ee5e648 100644
--- a/absl/container/btree_map.h
+++ b/absl/container/btree_map.h
@@ -226,6 +226,30 @@ class btree_map
   //   Inserts the elements within the initializer list `ilist`.
   using Base::insert;
 
+  // btree_map::insert_or_assign()
+  //
+  // Inserts an element of the specified value into the `btree_map`, provided
+  // that no element with the given key already exists, or replaces the
+  // corresponding mapped value with the forwarded `obj` argument if such a
+  // key already exists, returning an iterator pointing to the newly inserted
+  // or assigned element. Overloads are listed below.
+  //
+  // pair<iterator, bool> insert_or_assign(const key_type& k, M&& obj):
+  // pair<iterator, bool> insert_or_assign(key_type&& k, M&& obj):
+  //
+  //   Inserts (or moves) the element of the specified key, or assigns the
+  //   mapped value if the key already exists. If the returned bool is true,
+  //   insertion took place; if it is false, assignment took place.
+  //
+  // iterator insert_or_assign(const_iterator hint,
+  //                           const key_type& k, M&& obj):
+  // iterator insert_or_assign(const_iterator hint, key_type&& k, M&& obj):
+  //
+  //   Inserts (or moves) the element of the specified key, or assigns the
+  //   mapped value if the key already exists, using the position of `hint`
+  //   as a non-binding suggestion for where to begin the insertion search.
+  using Base::insert_or_assign;
+
   // btree_map::emplace()
   //
   // Inserts an element of the specified value by constructing it in-place
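
A minimal usage sketch of the new overloads (my example, mirroring the
`std::map::insert_or_assign` interface; the keys and values are illustrative,
not from the commit):

#include <iostream>
#include <string>

#include "absl/container/btree_map.h"

int main() {
  absl::btree_map<int, std::string> m;
  // Key 1 is absent, so an insertion takes place and ret.second is true.
  auto ret = m.insert_or_assign(1, "one");
  // Key 1 now exists, so the mapped value is assigned and ret.second is false.
  ret = m.insert_or_assign(1, "uno");
  // Hinted overload: the hint is a non-binding suggestion, and the return
  // value is just an iterator to the inserted or assigned element.
  m.insert_or_assign(m.end(), 2, "two");
  std::cout << m[1] << " " << m[2] << "\n";  // prints: uno two
}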
diff --git a/absl/container/btree_test.cc b/absl/container/btree_test.cc
index 8692b9c2b9d7..af8ee00b3524 100644
--- a/absl/container/btree_test.cc
+++ b/absl/container/btree_test.cc
@@ -2345,6 +2345,65 @@ TEST(Btree, EraseIf) {
   }
 }
 
+TEST(Btree, InsertOrAssign) {
+  absl::btree_map<int, int> m = {{1, 1}, {3, 3}};
+  using value_type = typename decltype(m)::value_type;
+
+  auto ret = m.insert_or_assign(4, 4);
+  EXPECT_EQ(*ret.first, value_type(4, 4));
+  EXPECT_TRUE(ret.second);
+  ret = m.insert_or_assign(3, 100);
+  EXPECT_EQ(*ret.first, value_type(3, 100));
+  EXPECT_FALSE(ret.second);
+
+  auto hint_ret = m.insert_or_assign(ret.first, 3, 200);
+  EXPECT_EQ(*hint_ret, value_type(3, 200));
+  hint_ret = m.insert_or_assign(m.find(1), 0, 1);
+  EXPECT_EQ(*hint_ret, value_type(0, 1));
+  // Test with bad hint.
+  hint_ret = m.insert_or_assign(m.end(), -1, 1);
+  EXPECT_EQ(*hint_ret, value_type(-1, 1));
+
+  EXPECT_THAT(m, ElementsAre(Pair(-1, 1), Pair(0, 1), Pair(1, 1), Pair(3, 200),
+                             Pair(4, 4)));
+}
+
+TEST(Btree, InsertOrAssignMovableOnly) {
+  absl::btree_map<int, MovableOnlyInstance> m;
+  using value_type = typename decltype(m)::value_type;
+
+  auto ret = m.insert_or_assign(4, MovableOnlyInstance(4));
+  EXPECT_EQ(*ret.first, value_type(4, MovableOnlyInstance(4)));
+  EXPECT_TRUE(ret.second);
+  ret = m.insert_or_assign(4, MovableOnlyInstance(100));
+  EXPECT_EQ(*ret.first, value_type(4, MovableOnlyInstance(100)));
+  EXPECT_FALSE(ret.second);
+
+  auto hint_ret = m.insert_or_assign(ret.first, 3, MovableOnlyInstance(200));
+  EXPECT_EQ(*hint_ret, value_type(3, MovableOnlyInstance(200)));
+
+  EXPECT_EQ(m.size(), 2);
+}
+
+TEST(Btree, BitfieldArgument) {
+  union {
+    int n : 1;
+  };
+  n = 0;
+  absl::btree_map<int, int> m;
+  m.erase(n);
+  m.count(n);
+  m.find(n);
+  m.contains(n);
+  m.equal_range(n);
+  m.insert_or_assign(n, n);
+  m.insert_or_assign(m.end(), n, n);
+  m.try_emplace(n);
+  m.try_emplace(m.end(), n);
+  m.at(n);
+  m[n];
+}
+
 }  // namespace
 }  // namespace container_internal
 ABSL_NAMESPACE_END
diff --git a/absl/container/internal/btree_container.h b/absl/container/internal/btree_container.h
index 04795c2e3f0a..3e6ff4b892f1 100644
--- a/absl/container/internal/btree_container.h
+++ b/absl/container/internal/btree_container.h
@@ -372,7 +372,7 @@ class btree_map_container : public btree_set_container<Tree> {
   using super_type = btree_set_container<Tree>;
   using params_type = typename Tree::params_type;
 
- protected:
+ private:
   template <class K>
   using key_arg = typename super_type::template key_arg<K>;
 
@@ -390,6 +390,69 @@ class btree_map_container : public btree_set_container<Tree> {
   btree_map_container() {}
 
   // Insertion routines.
+  // Note: the nullptr template arguments and extra `const M&` overloads allow
+  // for supporting bitfield arguments.
+  // Note: when we call `std::forward<M>(obj)` twice, it's safe because
+  // insert_unique/insert_hint_unique are guaranteed to not consume `obj` when
+  // `ret.second` is false.
+  template <class M>
+  std::pair<iterator, bool> insert_or_assign(const key_type &k, const M &obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_unique(k, k, obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret;
+  }
+  template <class M, key_type * = nullptr>
+  std::pair<iterator, bool> insert_or_assign(key_type &&k, const M &obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_unique(k, std::move(k), obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret;
+  }
+  template <class M, M * = nullptr>
+  std::pair<iterator, bool> insert_or_assign(const key_type &k, M &&obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_unique(k, k, std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret;
+  }
+  template <class M, key_type * = nullptr, M * = nullptr>
+  std::pair<iterator, bool> insert_or_assign(key_type &&k, M &&obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_unique(k, std::move(k), std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret;
+  }
+  template <class M>
+  iterator insert_or_assign(const_iterator position, const key_type &k,
+                            const M &obj) {
+    const std::pair<iterator, bool> ret =
+        this->tree_.insert_hint_unique(iterator(position), k, k, obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret.first;
+  }
+  template <class M, key_type * = nullptr>
+  iterator insert_or_assign(const_iterator position, key_type &&k,
+                            const M &obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
+        iterator(position), k, std::move(k), obj);
+    if (!ret.second) ret.first->second = obj;
+    return ret.first;
+  }
+  template <class M, M * = nullptr>
+  iterator insert_or_assign(const_iterator position, const key_type &k,
+                            M &&obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
+        iterator(position), k, k, std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret.first;
+  }
+  template <class M, key_type * = nullptr, M * = nullptr>
+  iterator insert_or_assign(const_iterator position, key_type &&k, M &&obj) {
+    const std::pair<iterator, bool> ret = this->tree_.insert_hint_unique(
+        iterator(position), k, std::move(k), std::forward<M>(obj));
+    if (!ret.second) ret.first->second = std::forward<M>(obj);
+    return ret.first;
+  }
   template <typename... Args>
   std::pair<iterator, bool> try_emplace(const key_type &k, Args &&... args) {
     return this->tree_.insert_unique(
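
The two `Note:` comments above carry the design rationale. A bitfield can
never bind to a non-const reference, so a call like `m.insert_or_assign(n, n)`
with a bitfield `n` can only select the `const M &` overloads; the defaulted
`key_type *`/`M *` template parameters simply give each of the four templates
a distinct signature so the whole set can coexist. Forwarding `obj` twice is
safe because at most one of the two uses ever consumes it. A standalone sketch
(mine, not Abseil code) of the binding rule involved:

struct Flags {
  int n : 1;
};

template <class M>
void take_const_ref(const M &) {}  // viable for bitfields: binds a temporary

template <class M>
void take_forwarding_ref(M &&) {}  // for an lvalue int, M deduces to int&

int main() {
  Flags f = {};
  f.n = 0;
  take_const_ref(f.n);           // OK: const int& may bind to a bitfield
  // take_forwarding_ref(f.n);   // ill-formed: int& cannot bind to a bitfield
  take_forwarding_ref(f.n + 0);  // OK: a plain int prvalue binds to int&&
}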
diff --git a/absl/container/internal/hashtablez_sampler.cc b/absl/container/internal/hashtablez_sampler.cc
index e15f44443f59..564472517844 100644
--- a/absl/container/internal/hashtablez_sampler.cc
+++ b/absl/container/internal/hashtablez_sampler.cc
@@ -39,17 +39,16 @@ ABSL_CONST_INIT std::atomic<bool> g_hashtablez_enabled{
 ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_sample_parameter{1 << 10};
 ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_max_samples{1 << 20};
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 ABSL_PER_THREAD_TLS_KEYWORD absl::base_internal::ExponentialBiased
     g_exponential_biased_generator;
 #endif
 
 }  // namespace
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample = 0;
-#endif  // ABSL_PER_THREAD_TLS == 1
-
+#endif  // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 
 HashtablezSampler& HashtablezSampler::Global() {
   static auto* sampler = new HashtablezSampler();
@@ -192,7 +191,7 @@ HashtablezInfo* SampleSlow(int64_t* next_sample) {
     return HashtablezSampler::Global().Register();
   }
 
-#if ABSL_PER_THREAD_TLS == 0
+#if !defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
   *next_sample = std::numeric_limits<int64_t>::max();
   return nullptr;
 #else
diff --git a/absl/container/internal/hashtablez_sampler.h b/absl/container/internal/hashtablez_sampler.h
index c4f9629fcb47..34d5e5723c0f 100644
--- a/absl/container/internal/hashtablez_sampler.h
+++ b/absl/container/internal/hashtablez_sampler.h
@@ -180,14 +180,23 @@ class HashtablezInfoHandle {
   HashtablezInfo* info_;
 };
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
+#error ABSL_INTERNAL_HASHTABLEZ_SAMPLE cannot be directly set
+#endif  // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
+
+#if (ABSL_PER_THREAD_TLS == 1) && !defined(ABSL_BUILD_DLL) && \
+    !defined(ABSL_CONSUME_DLL)
+#define ABSL_INTERNAL_HASHTABLEZ_SAMPLE
+#endif
+
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 extern ABSL_PER_THREAD_TLS_KEYWORD int64_t global_next_sample;
 #endif  // ABSL_PER_THREAD_TLS
 
// Returns an RAII sampling handle that manages registration and unregistration
 // with the global sampler.
 inline HashtablezInfoHandle Sample() {
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
   if (ABSL_PREDICT_TRUE(--global_next_sample > 0)) {
     return HashtablezInfoHandle(nullptr);
   }
diff --git a/absl/container/internal/hashtablez_sampler_test.cc b/absl/container/internal/hashtablez_sampler_test.cc
index 102b23757cf6..36f5ccdd02a7 100644
--- a/absl/container/internal/hashtablez_sampler_test.cc
+++ b/absl/container/internal/hashtablez_sampler_test.cc
@@ -169,7 +169,7 @@ TEST(HashtablezInfoTest, RecordRehash) {
   EXPECT_EQ(info.num_erases.load(), 0);
 }
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 TEST(HashtablezSamplerTest, SmallSampleParameter) {
   SetHashtablezEnabled(true);
   SetHashtablezSampleParameter(100);
diff --git a/absl/container/internal/raw_hash_set_test.cc b/absl/container/internal/raw_hash_set_test.cc
index 38e5e0e8d3bd..a96ae68ac76c 100644
--- a/absl/container/internal/raw_hash_set_test.cc
+++ b/absl/container/internal/raw_hash_set_test.cc
@@ -418,53 +418,6 @@ TEST(Table, Empty) {
   EXPECT_TRUE(t.empty());
 }
 
-#ifdef __GNUC__
-template <class T>
-ABSL_ATTRIBUTE_ALWAYS_INLINE inline void DoNotOptimize(const T& v) {
-  asm volatile("" : : "r,m"(v) : "memory");
-}
-#endif
-
-TEST(Table, Prefetch) {
-  IntTable t;
-  t.emplace(1);
-  // Works for both present and absent keys.
-  t.prefetch(1);
-  t.prefetch(2);
-
-  // Do not run in debug mode, when prefetch is not implemented, or when
-  // sanitizers are enabled, or on WebAssembly.
-#if defined(NDEBUG) && defined(__GNUC__) && defined(__x86_64__) &&          \
-    !defined(ADDRESS_SANITIZER) && !defined(MEMORY_SANITIZER) &&            \
-    !defined(THREAD_SANITIZER) && !defined(UNDEFINED_BEHAVIOR_SANITIZER) && \
-    !defined(__EMSCRIPTEN__)
-  const auto now = [] { return absl::base_internal::CycleClock::Now(); };
-
-  // Make size enough to not fit in L2 cache (16.7 Mb)
-  static constexpr int size = 1 << 22;
-  for (int i = 0; i < size; ++i) t.insert(i);
-
-  int64_t no_prefetch = 0, prefetch = 0;
-  for (int iter = 0; iter < 10; ++iter) {
-    int64_t time = now();
-    for (int i = 0; i < size; ++i) {
-      DoNotOptimize(t.find(i));
-    }
-    no_prefetch += now() - time;
-
-    time = now();
-    for (int i = 0; i < size; ++i) {
-      t.prefetch(i + 20);
-      DoNotOptimize(t.find(i));
-    }
-    prefetch += now() - time;
-  }
-
-  // no_prefetch is at least 30% slower.
-  EXPECT_GE(1.0 * no_prefetch / prefetch, 1.3);
-#endif
-}
-
 TEST(Table, LookupEmpty) {
   IntTable t;
   auto it = t.find(0);
@@ -1842,7 +1795,7 @@ TEST(TableDeathTest, EraseOfEndAsserts) {
   EXPECT_DEATH_IF_SUPPORTED(t.erase(t.end()), kDeathMsg);
 }
 
-#if ABSL_PER_THREAD_TLS == 1
+#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 TEST(RawHashSamplerTest, Sample) {
   // Enable the feature even if the prod default is off.
   SetHashtablezEnabled(true);
@@ -1863,7 +1816,7 @@ TEST(RawHashSamplerTest, Sample) {
   EXPECT_NEAR((end_size - start_size) / static_cast<double>(tables.size()),
               0.01, 0.005);
 }
-#endif
+#endif  // defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
 
 TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) {
   // Enable the feature even if the prod default is off.