author     Vincent Ambo <mail@tazj.in>          2020-11-21 13:43 +0100
committer  Vincent Ambo <mail@tazj.in>          2020-11-21 14:48 +0100
commit     082c006c04343a78d87b6c6ab3608c25d6213c3f (patch)
tree       16e6f04f8d1d1d2d67e8e917d5e7bb48c1b60375 /third_party/abseil_cpp/absl/container/internal/btree.h
parent     cc27324d0226953943f408ce3c69ad7d648e005e (diff)
merge(3p/absl): subtree merge of Abseil up to e19260f (r/1889)
... notably, this includes Abseil's own StatusOr type, which
conflicted with our implementation (that was taken from TensorFlow).

Change-Id: Ie7d6764b64055caaeb8dc7b6b9d066291e6b538f
Diffstat (limited to 'third_party/abseil_cpp/absl/container/internal/btree.h')
-rw-r--r--  third_party/abseil_cpp/absl/container/internal/btree.h | 651
1 file changed, 319 insertions, 332 deletions
diff --git a/third_party/abseil_cpp/absl/container/internal/btree.h b/third_party/abseil_cpp/absl/container/internal/btree.h
index f52fe235a2..f2fc31df8d 100644
--- a/third_party/abseil_cpp/absl/container/internal/btree.h
+++ b/third_party/abseil_cpp/absl/container/internal/btree.h
@@ -137,15 +137,14 @@ struct StringBtreeDefaultGreater {
 };
 
 // A helper class to convert a boolean comparison into a three-way "compare-to"
-// comparison that returns a negative value to indicate less-than, zero to
-// indicate equality and a positive value to indicate greater-than. This helper
+// comparison that returns an `absl::weak_ordering`. This helper
 // class is specialized for less<std::string>, greater<std::string>,
 // less<string_view>, greater<string_view>, less<absl::Cord>, and
 // greater<absl::Cord>.
 //
 // key_compare_to_adapter is provided so that btree users
 // automatically get the more efficient compare-to code when using common
-// google string types with common comparison functors.
+// Abseil string types with common comparison functors.
 // These string-like specializations also turn on heterogeneous lookup by
 // default.
 template <typename Compare>
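
The adapter above is only needed for boolean less-than functors; a comparator that already returns absl::weak_ordering satisfies the compare-to protocol directly and is used as-is. A minimal sketch of such a comparator (illustrative, not part of this diff; assumes "absl/types/compare.h"):

    #include "absl/types/compare.h"

    // Three-way comparator usable with btree containers; the name is an example.
    struct IntCompareTo {
      absl::weak_ordering operator()(int a, int b) const {
        if (a < b) return absl::weak_ordering::less;
        if (a > b) return absl::weak_ordering::greater;
        return absl::weak_ordering::equivalent;
      }
    };
    // e.g. absl::btree_set<int, IntCompareTo> then uses three-way comparisons.
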
@@ -183,12 +182,47 @@ struct key_compare_to_adapter<std::greater<absl::Cord>> {
   using type = StringBtreeDefaultGreater;
 };
 
+// Detects an 'absl_btree_prefer_linear_node_search' member. This is
+// a protocol used as an opt-in or opt-out of linear search.
+//
+//  For example, this would be useful for key types that wrap an integer
+//  and define their own cheap operator<(). For example:
+//
+//   class K {
+//    public:
+//     using absl_btree_prefer_linear_node_search = std::true_type;
+//     ...
+//    private:
+//     friend bool operator<(K a, K b) { return a.k_ < b.k_; }
+//     int k_;
+//   };
+//
+//   btree_map<K, V> m;  // Uses linear search
+//
+// If T has the preference tag, then it has a preference.
+// Btree will use the tag's truth value.
+template <typename T, typename = void>
+struct has_linear_node_search_preference : std::false_type {};
+template <typename T, typename = void>
+struct prefers_linear_node_search : std::false_type {};
+template <typename T>
+struct has_linear_node_search_preference<
+    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
+    : std::true_type {};
+template <typename T>
+struct prefers_linear_node_search<
+    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
+    : T::absl_btree_prefer_linear_node_search {};
+
 template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
           bool Multi, typename SlotPolicy>
 struct common_params {
   // If Compare is a common comparator for a string-like type, then we adapt it
   // to use heterogeneous lookup and to be a key-compare-to comparator.
   using key_compare = typename key_compare_to_adapter<Compare>::type;
+  // True when key_compare has been adapted to StringBtreeDefault{Less,Greater}.
+  using is_key_compare_adapted =
+      absl::negation<std::is_same<key_compare, Compare>>;
   // A type which indicates if we have a key-compare-to functor or a plain old
   // key-compare functor.
   using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;
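
The absl_btree_prefer_linear_node_search tag introduced above works in both directions: the comment shows a cheap integer-like key opting in with std::true_type, and a type whose comparisons are expensive can opt out with std::false_type to force binary search within nodes. A minimal sketch of the opt-out case (illustrative only):

    #include <string>
    #include <type_traits>

    class ExpensiveKey {
     public:
      // Prefer binary node search: each comparison walks a long string.
      using absl_btree_prefer_linear_node_search = std::false_type;

     private:
      friend bool operator<(const ExpensiveKey &a, const ExpensiveKey &b) {
        return a.blob_ < b.blob_;
      }
      std::string blob_;
    };
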
@@ -255,10 +289,6 @@ struct common_params {
   static void move(Alloc *alloc, slot_type *src, slot_type *dest) {
     slot_policy::move(alloc, src, dest);
   }
-  static void move(Alloc *alloc, slot_type *first, slot_type *last,
-                   slot_type *result) {
-    slot_policy::move(alloc, first, last, result);
-  }
 };
 
 // A parameters structure for holding the type parameters for a btree_map.
@@ -290,9 +320,17 @@ struct map_params : common_params<Key, Compare, Alloc, TargetNodeSize, Multi,
   };
   using is_map_container = std::true_type;
 
-  static const Key &key(const value_type &value) { return value.first; }
-  static const Key &key(const init_type &init) { return init.first; }
+  template <typename V>
+  static auto key(const V &value) -> decltype(value.first) {
+    return value.first;
+  }
   static const Key &key(const slot_type *s) { return slot_policy::key(s); }
+  static const Key &key(slot_type *s) { return slot_policy::key(s); }
+  // For use in node handle.
+  static auto mutable_key(slot_type *s)
+      -> decltype(slot_policy::mutable_key(s)) {
+    return slot_policy::mutable_key(s);
+  }
   static mapped_type &value(value_type *value) { return value->second; }
 };
 
@@ -333,13 +371,6 @@ struct set_slot_policy {
   static void move(Alloc * /*alloc*/, slot_type *src, slot_type *dest) {
     *dest = std::move(*src);
   }
-
-  template <typename Alloc>
-  static void move(Alloc *alloc, slot_type *first, slot_type *last,
-                   slot_type *result) {
-    for (slot_type *src = first, *dest = result; src != last; ++src, ++dest)
-      move(alloc, src, dest);
-  }
 };
 
 // A parameters structure for holding the type parameters for a btree_set.
@@ -353,8 +384,10 @@ struct set_params : common_params<Key, Compare, Alloc, TargetNodeSize, Multi,
   using value_compare = typename set_params::common_params::key_compare;
   using is_map_container = std::false_type;
 
-  static const Key &key(const value_type &value) { return value; }
+  template <typename V>
+  static const V &key(const V &value) { return value; }
   static const Key &key(const slot_type *slot) { return *slot; }
+  static const Key &key(slot_type *slot) { return *slot; }
 };
 
 // An adapter class that converts a lower-bound compare into an upper-bound
@@ -390,6 +423,10 @@ struct SearchResult {
 // useful information.
 template <typename V>
 struct SearchResult<V, false> {
+  SearchResult() {}
+  explicit SearchResult(V value) : value(value) {}
+  SearchResult(V value, MatchKind /*match*/) : value(value) {}
+
   V value;
 
   static constexpr bool HasMatch() { return false; }
@@ -420,15 +457,22 @@ class btree_node {
   using difference_type = typename Params::difference_type;
 
   // Btree decides whether to use linear node search as follows:
+  //   - If the comparator expresses a preference, use that.
+  //   - If the key expresses a preference, use that.
   //   - If the key is arithmetic and the comparator is std::less or
   //     std::greater, choose linear.
   //   - Otherwise, choose binary.
   // TODO(ezb): Might make sense to add condition(s) based on node-size.
   using use_linear_search = std::integral_constant<
       bool,
-                std::is_arithmetic<key_type>::value &&
-                    (std::is_same<std::less<key_type>, key_compare>::value ||
-                     std::is_same<std::greater<key_type>, key_compare>::value)>;
+      has_linear_node_search_preference<key_compare>::value
+          ? prefers_linear_node_search<key_compare>::value
+          : has_linear_node_search_preference<key_type>::value
+                ? prefers_linear_node_search<key_type>::value
+                : std::is_arithmetic<key_type>::value &&
+                      (std::is_same<std::less<key_type>, key_compare>::value ||
+                       std::is_same<std::greater<key_type>,
+                                    key_compare>::value)>;
 
   // This class is organized by gtl::Layout as if it had the following
   // structure:
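
The nested conditional in use_linear_search above encodes a three-level precedence. Written out as a plain helper against the traits introduced earlier in this diff (a sketch for exposition, not code from the header):

    #include <functional>
    #include <type_traits>

    template <typename Key, typename Compare>
    bool UseLinearSearchFor() {
      // 1. The comparator's preference wins if it states one.
      if (has_linear_node_search_preference<Compare>::value)
        return prefers_linear_node_search<Compare>::value;
      // 2. Otherwise the key's preference wins if it states one.
      if (has_linear_node_search_preference<Key>::value)
        return prefers_linear_node_search<Key>::value;
      // 3. Otherwise fall back to the arithmetic-key fast path.
      return std::is_arithmetic<Key>::value &&
             (std::is_same<std::less<Key>, Compare>::value ||
              std::is_same<std::greater<Key>, Compare>::value);
    }
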
@@ -671,7 +715,7 @@ class btree_node {
       }
       ++s;
     }
-    return {s};
+    return SearchResult<int, false>{s};
   }
 
   // Returns the position of the first value whose key is not less than k using
@@ -706,7 +750,7 @@ class btree_node {
         e = mid;
       }
     }
-    return {s};
+    return SearchResult<int, false>{s};
   }
 
   // Returns the position of the first value whose key is not less than k using
@@ -754,14 +798,10 @@ class btree_node {
   template <typename... Args>
   void emplace_value(size_type i, allocator_type *alloc, Args &&... args);
 
-  // Removes the value at position i, shifting all existing values and children
-  // at positions > i to the left by 1.
-  void remove_value(int i, allocator_type *alloc);
-
-  // Removes the values at positions [i, i + to_erase), shifting all values
-  // after that range to the left by to_erase. Does not change children at all.
-  void remove_values_ignore_children(int i, int to_erase,
-                                     allocator_type *alloc);
+  // Removes the values at positions [i, i + to_erase), shifting all existing
+  // values and children after that range to the left by to_erase. Clears all
+  // children between [i, i + to_erase).
+  void remove_values(field_type i, field_type to_erase, allocator_type *alloc);
 
   // Rebalances a node with its right sibling.
   void rebalance_right_to_left(int to_move, btree_node *right,
@@ -773,7 +813,7 @@ class btree_node {
   void split(int insert_position, btree_node *dest, allocator_type *alloc);
 
   // Merges a node with its right sibling, moving all of the values and the
-  // delimiting key in the parent node onto itself.
+  // delimiting key in the parent node onto itself, and deleting the src node.
   void merge(btree_node *src, allocator_type *alloc);
 
   // Node allocation/deletion routines.
@@ -794,35 +834,43 @@ class btree_node {
     absl::container_internal::SanitizerPoisonMemoryRegion(
         &mutable_child(start()), (kNodeValues + 1) * sizeof(btree_node *));
   }
-  void destroy(allocator_type *alloc) {
-    for (int i = start(); i < finish(); ++i) {
-      value_destroy(i, alloc);
-    }
-  }
 
- public:
-  // Exposed only for tests.
-  static bool testonly_uses_linear_node_search() {
-    return use_linear_search::value;
+  static void deallocate(const size_type size, btree_node *node,
+                         allocator_type *alloc) {
+    absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
   }
 
+  // Deletes a node and all of its children.
+  static void clear_and_delete(btree_node *node, allocator_type *alloc);
+
  private:
   template <typename... Args>
-  void value_init(const size_type i, allocator_type *alloc, Args &&... args) {
+  void value_init(const field_type i, allocator_type *alloc, Args &&... args) {
     absl::container_internal::SanitizerUnpoisonObject(slot(i));
     params_type::construct(alloc, slot(i), std::forward<Args>(args)...);
   }
-  void value_destroy(const size_type i, allocator_type *alloc) {
+  void value_destroy(const field_type i, allocator_type *alloc) {
     params_type::destroy(alloc, slot(i));
     absl::container_internal::SanitizerPoisonObject(slot(i));
   }
+  void value_destroy_n(const field_type i, const field_type n,
+                       allocator_type *alloc) {
+    for (slot_type *s = slot(i), *end = slot(i + n); s != end; ++s) {
+      params_type::destroy(alloc, s);
+      absl::container_internal::SanitizerPoisonObject(s);
+    }
+  }
+
+  static void transfer(slot_type *dest, slot_type *src, allocator_type *alloc) {
+    absl::container_internal::SanitizerUnpoisonObject(dest);
+    params_type::transfer(alloc, dest, src);
+    absl::container_internal::SanitizerPoisonObject(src);
+  }
 
   // Transfers value from slot `src_i` in `src_node` to slot `dest_i` in `this`.
   void transfer(const size_type dest_i, const size_type src_i,
                 btree_node *src_node, allocator_type *alloc) {
-    absl::container_internal::SanitizerUnpoisonObject(slot(dest_i));
-    params_type::transfer(alloc, slot(dest_i), src_node->slot(src_i));
-    absl::container_internal::SanitizerPoisonObject(src_node->slot(src_i));
+    transfer(slot(dest_i), src_node->slot(src_i), alloc);
   }
 
   // Transfers `n` values starting at value `src_i` in `src_node` into the
@@ -830,19 +878,11 @@ class btree_node {
   void transfer_n(const size_type n, const size_type dest_i,
                   const size_type src_i, btree_node *src_node,
                   allocator_type *alloc) {
-    absl::container_internal::SanitizerUnpoisonMemoryRegion(
-        slot(dest_i), n * sizeof(slot_type));
     for (slot_type *src = src_node->slot(src_i), *end = src + n,
                    *dest = slot(dest_i);
          src != end; ++src, ++dest) {
-      params_type::transfer(alloc, dest, src);
+      transfer(dest, src, alloc);
     }
-    // We take care to avoid poisoning transferred-to nodes in case of overlap.
-    const size_type overlap =
-        this == src_node ? (std::max)(src_i, dest_i + n) - src_i : 0;
-    assert(n >= overlap);
-    absl::container_internal::SanitizerPoisonMemoryRegion(
-        src_node->slot(src_i + overlap), (n - overlap) * sizeof(slot_type));
   }
 
   // Same as above, except that we start at the end and work our way to the
@@ -850,19 +890,11 @@ class btree_node {
   void transfer_n_backward(const size_type n, const size_type dest_i,
                            const size_type src_i, btree_node *src_node,
                            allocator_type *alloc) {
-    absl::container_internal::SanitizerUnpoisonMemoryRegion(
-        slot(dest_i), n * sizeof(slot_type));
     for (slot_type *src = src_node->slot(src_i + n - 1), *end = src - n,
                    *dest = slot(dest_i + n - 1);
          src != end; --src, --dest) {
-      params_type::transfer(alloc, dest, src);
+      transfer(dest, src, alloc);
     }
-    // We take care to avoid poisoning transferred-to nodes in case of overlap.
-    assert(this != src_node || dest_i >= src_i);
-    const size_type num_to_poison =
-        this == src_node ? (std::min)(n, dest_i - src_i) : n;
-    absl::container_internal::SanitizerPoisonMemoryRegion(
-        src_node->slot(src_i), num_to_poison * sizeof(slot_type));
   }
 
   template <typename P>
@@ -1020,6 +1052,10 @@ template <typename Params>
 class btree {
   using node_type = btree_node<Params>;
   using is_key_compare_to = typename Params::is_key_compare_to;
+  using init_type = typename Params::init_type;
+  using field_type = typename node_type::field_type;
+  using is_multi_container = typename Params::is_multi_container;
+  using is_key_compare_adapted = typename Params::is_key_compare_adapted;
 
   // We use a static empty node for the root/leftmost/rightmost of empty btrees
   // in order to avoid branching in begin()/end().
@@ -1054,7 +1090,7 @@ class btree {
 #endif
   }
 
-  enum {
+  enum : uint32_t {
     kNodeValues = node_type::kNodeValues,
     kMinNodeValues = kNodeValues / 2,
   };
@@ -1106,21 +1142,35 @@ class btree {
   // before this method is called. This method is used in copy construction,
   // copy assignment, and move assignment.
   template <typename Btree>
-  void copy_or_move_values_in_order(Btree *other);
+  void copy_or_move_values_in_order(Btree &other);
 
   // Validates that various assumptions/requirements are true at compile time.
   constexpr static bool static_assert_validation();
 
  public:
-  btree(const key_compare &comp, const allocator_type &alloc);
+  btree(const key_compare &comp, const allocator_type &alloc)
+      : root_(comp, alloc, EmptyNode()), rightmost_(EmptyNode()), size_(0) {}
 
-  btree(const btree &other);
+  btree(const btree &other) : btree(other, other.allocator()) {}
+  btree(const btree &other, const allocator_type &alloc)
+      : btree(other.key_comp(), alloc) {
+    copy_or_move_values_in_order(other);
+  }
   btree(btree &&other) noexcept
       : root_(std::move(other.root_)),
         rightmost_(absl::exchange(other.rightmost_, EmptyNode())),
         size_(absl::exchange(other.size_, 0)) {
     other.mutable_root() = EmptyNode();
   }
+  btree(btree &&other, const allocator_type &alloc)
+      : btree(other.key_comp(), alloc) {
+    if (alloc == other.allocator()) {
+      swap(other);
+    } else {
+      // Move values from `other` one at a time when allocators are different.
+      copy_or_move_values_in_order(other);
+    }
+  }
 
   ~btree() {
     // Put static_asserts in destructor to avoid triggering them before the type
@@ -1151,11 +1201,11 @@ class btree {
   // Finds the first element whose key is not less than key.
   template <typename K>
   iterator lower_bound(const K &key) {
-    return internal_end(internal_lower_bound(key));
+    return internal_end(internal_lower_bound(key).value);
   }
   template <typename K>
   const_iterator lower_bound(const K &key) const {
-    return internal_end(internal_lower_bound(key));
+    return internal_end(internal_lower_bound(key).value);
   }
 
   // Finds the first element whose key is greater than key.
@@ -1169,23 +1219,21 @@ class btree {
   }
 
   // Finds the range of values which compare equal to key. The first member of
-  // the returned pair is equal to lower_bound(key). The second member pair of
-  // the pair is equal to upper_bound(key).
+  // the returned pair is equal to lower_bound(key). The second member of the
+  // pair is equal to upper_bound(key).
   template <typename K>
-  std::pair<iterator, iterator> equal_range(const K &key) {
-    return {lower_bound(key), upper_bound(key)};
-  }
+  std::pair<iterator, iterator> equal_range(const K &key);
   template <typename K>
   std::pair<const_iterator, const_iterator> equal_range(const K &key) const {
-    return {lower_bound(key), upper_bound(key)};
+    return const_cast<btree *>(this)->equal_range(key);
   }
 
   // Inserts a value into the btree only if it does not already exist. The
   // boolean return value indicates whether insertion succeeded or failed.
   // Requirement: if `key` already exists in the btree, does not consume `args`.
   // Requirement: `key` is never referenced after consuming `args`.
-  template <typename... Args>
-  std::pair<iterator, bool> insert_unique(const key_type &key, Args &&... args);
+  template <typename K, typename... Args>
+  std::pair<iterator, bool> insert_unique(const K &key, Args &&... args);
 
   // Inserts with hint. Checks to see if the value should be placed immediately
   // before `position` in the tree. If so, then the insertion will take
@@ -1193,14 +1241,23 @@ class btree {
   // logarithmic time as if a call to insert_unique() were made.
   // Requirement: if `key` already exists in the btree, does not consume `args`.
   // Requirement: `key` is never referenced after consuming `args`.
-  template <typename... Args>
+  template <typename K, typename... Args>
   std::pair<iterator, bool> insert_hint_unique(iterator position,
-                                               const key_type &key,
+                                               const K &key,
                                                Args &&... args);
 
   // Insert a range of values into the btree.
+  // Note: the first overload avoids constructing a value_type if the key
+  // already exists in the btree.
+  template <typename InputIterator,
+            typename = decltype(std::declval<const key_compare &>()(
+                params_type::key(*std::declval<InputIterator>()),
+                std::declval<const key_type &>()))>
+  void insert_iterator_unique(InputIterator b, InputIterator e, int);
+  // We need the second overload for cases in which we need to construct a
+  // value_type in order to compare it with the keys already in the btree.
   template <typename InputIterator>
-  void insert_iterator_unique(InputIterator b, InputIterator e);
+  void insert_iterator_unique(InputIterator b, InputIterator e, char);
 
   // Inserts a value into the btree.
   template <typename ValueType>
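
The int/char trailing parameters on the two insert_iterator_unique overloads declared above are a standard overload-ranking idiom: callers pass 0, which binds to the int overload when its SFINAE condition holds and falls back to the char overload (one implicit conversion away) otherwise. A self-contained sketch of the pattern with hypothetical names, not code from this header:

    #include <utility>

    // Preferred overload: viable only when T exposes a key() accessor.
    template <typename T, typename = decltype(std::declval<const T &>().key())>
    void InsertImpl(const T &value, int) {
      (void)value.key();  // fast path: inspect the key without constructing anything
    }

    // Fallback overload: 0 -> char needs a conversion, so this loses when both match.
    template <typename T>
    void InsertImpl(const T &value, char) {
      (void)value;  // slow path: construct a full value first
    }

    template <typename T>
    void Insert(const T &value) {
      InsertImpl(value, 0);  // the literal 0 is an int
    }
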
@@ -1233,16 +1290,6 @@ class btree {
   // to the element after the last erased element.
   std::pair<size_type, iterator> erase_range(iterator begin, iterator end);
 
-  // Erases the specified key from the btree. Returns 1 if an element was
-  // erased and 0 otherwise.
-  template <typename K>
-  size_type erase_unique(const K &key);
-
-  // Erases all of the entries matching the specified key from the
-  // btree. Returns the number of elements erased.
-  template <typename K>
-  size_type erase_multi(const K &key);
-
   // Finds the iterator corresponding to a key or returns end() if the key is
   // not present.
   template <typename K>
@@ -1254,23 +1301,6 @@ class btree {
     return internal_end(internal_find(key));
   }
 
-  // Returns a count of the number of times the key appears in the btree.
-  template <typename K>
-  size_type count_unique(const K &key) const {
-    const iterator begin = internal_find(key);
-    if (begin.node == nullptr) {
-      // The key doesn't exist in the tree.
-      return 0;
-    }
-    return 1;
-  }
-  // Returns a count of the number of times the key appears in the btree.
-  template <typename K>
-  size_type count_multi(const K &key) const {
-    const auto range = equal_range(key);
-    return std::distance(range.first, range.second);
-  }
-
   // Clear the btree, deleting all of the values it contains.
   void clear();
 
@@ -1408,25 +1438,8 @@ class btree {
   }
 
   // Deletion helper routines.
-  void erase_same_node(iterator begin, iterator end);
-  iterator erase_from_leaf_node(iterator begin, size_type to_erase);
   iterator rebalance_after_delete(iterator iter);
 
-  // Deallocates a node of a certain size in bytes using the allocator.
-  void deallocate(const size_type size, node_type *node) {
-    absl::container_internal::Deallocate<node_type::Alignment()>(
-        mutable_allocator(), node, size);
-  }
-
-  void delete_internal_node(node_type *node) {
-    node->destroy(mutable_allocator());
-    deallocate(node_type::InternalSize(), node);
-  }
-  void delete_leaf_node(node_type *node) {
-    node->destroy(mutable_allocator());
-    deallocate(node_type::LeafSize(node->max_count()), node);
-  }
-
   // Rebalances or splits the node iter points to.
   void rebalance_or_split(iterator *iter);
 
@@ -1464,28 +1477,19 @@ class btree {
   static IterType internal_last(IterType iter);
 
   // Returns an iterator pointing to the leaf position at which key would
-  // reside in the tree. We provide 2 versions of internal_locate. The first
-  // version uses a less-than comparator and is incapable of distinguishing when
-  // there is an exact match. The second version is for the key-compare-to
-  // specialization and distinguishes exact matches. The key-compare-to
-  // specialization allows the caller to avoid a subsequent comparison to
-  // determine if an exact match was made, which is important for keys with
-  // expensive comparison, such as strings.
+  // reside in the tree, unless there is an exact match - in which case, the
+  // result may not be on a leaf. When there's a three-way comparator, we can
+  // return whether there was an exact match. This allows the caller to avoid a
+  // subsequent comparison to determine if an exact match was made, which is
+  // important for keys with expensive comparison, such as strings.
   template <typename K>
   SearchResult<iterator, is_key_compare_to::value> internal_locate(
       const K &key) const;
 
-  template <typename K>
-  SearchResult<iterator, false> internal_locate_impl(
-      const K &key, std::false_type /* IsCompareTo */) const;
-
-  template <typename K>
-  SearchResult<iterator, true> internal_locate_impl(
-      const K &key, std::true_type /* IsCompareTo */) const;
-
   // Internal routine which implements lower_bound().
   template <typename K>
-  iterator internal_lower_bound(const K &key) const;
+  SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
+      const K &key) const;
 
   // Internal routine which implements upper_bound().
   template <typename K>
@@ -1495,9 +1499,6 @@ class btree {
   template <typename K>
   iterator internal_find(const K &key) const;
 
-  // Deletes a node and all of its children.
-  void internal_clear(node_type *node);
-
   // Verifies the tree structure of node.
   int internal_verify(const node_type *node, const key_type *lo,
                       const key_type *hi) const;
@@ -1517,13 +1518,6 @@ class btree {
     return res;
   }
 
- public:
-  // Exposed only for tests.
-  static bool testonly_uses_linear_node_search() {
-    return node_type::testonly_uses_linear_node_search();
-  }
-
- private:
   // We use compressed tuple in order to save space because key_compare and
   // allocator_type are usually empty.
   absl::container_internal::CompressedTuple<key_compare, allocator_type,
@@ -1565,26 +1559,27 @@ inline void btree_node<P>::emplace_value(const size_type i,
 }
 
 template <typename P>
-inline void btree_node<P>::remove_value(const int i, allocator_type *alloc) {
-  if (!leaf() && finish() > i + 1) {
-    assert(child(i + 1)->count() == 0);
-    for (size_type j = i + 1; j < finish(); ++j) {
-      set_child(j, child(j + 1));
-    }
-    clear_child(finish());
-  }
-
-  remove_values_ignore_children(i, /*to_erase=*/1, alloc);
-}
+inline void btree_node<P>::remove_values(const field_type i,
+                                         const field_type to_erase,
+                                         allocator_type *alloc) {
+  // Transfer values after the removed range into their new places.
+  value_destroy_n(i, to_erase, alloc);
+  const field_type orig_finish = finish();
+  const field_type src_i = i + to_erase;
+  transfer_n(orig_finish - src_i, i, src_i, this, alloc);
 
-template <typename P>
-inline void btree_node<P>::remove_values_ignore_children(
-    const int i, const int to_erase, allocator_type *alloc) {
-  params_type::move(alloc, slot(i + to_erase), finish_slot(), slot(i));
-  for (int j = finish() - to_erase; j < finish(); ++j) {
-    value_destroy(j, alloc);
+  if (!leaf()) {
+    // Delete all children between begin and end.
+    for (int j = 0; j < to_erase; ++j) {
+      clear_and_delete(child(i + j + 1), alloc);
+    }
+    // Rotate children after end into new positions.
+    for (int j = i + to_erase + 1; j <= orig_finish; ++j) {
+      set_child(j - to_erase, child(j));
+      clear_child(j);
+    }
   }
-  set_finish(finish() - to_erase);
+  set_finish(orig_finish - to_erase);
 }
 
 template <typename P>
@@ -1736,8 +1731,59 @@ void btree_node<P>::merge(btree_node *src, allocator_type *alloc) {
   set_finish(start() + 1 + count() + src->count());
   src->set_finish(src->start());
 
-  // Remove the value on the parent node.
-  parent()->remove_value(position(), alloc);
+  // Remove the value on the parent node and delete the src node.
+  parent()->remove_values(position(), /*to_erase=*/1, alloc);
+}
+
+template <typename P>
+void btree_node<P>::clear_and_delete(btree_node *node, allocator_type *alloc) {
+  if (node->leaf()) {
+    node->value_destroy_n(node->start(), node->count(), alloc);
+    deallocate(LeafSize(node->max_count()), node, alloc);
+    return;
+  }
+  if (node->count() == 0) {
+    deallocate(InternalSize(), node, alloc);
+    return;
+  }
+
+  // The parent of the root of the subtree we are deleting.
+  btree_node *delete_root_parent = node->parent();
+
+  // Navigate to the leftmost leaf under node, and then delete upwards.
+  while (!node->leaf()) node = node->start_child();
+  // Use `int` because `pos` needs to be able to hold `kNodeValues+1`, which
+  // isn't guaranteed to be a valid `field_type`.
+  int pos = node->position();
+  btree_node *parent = node->parent();
+  for (;;) {
+    // In each iteration of the next loop, we delete one leaf node and go right.
+    assert(pos <= parent->finish());
+    do {
+      node = parent->child(pos);
+      if (!node->leaf()) {
+        // Navigate to the leftmost leaf under node.
+        while (!node->leaf()) node = node->start_child();
+        pos = node->position();
+        parent = node->parent();
+      }
+      node->value_destroy_n(node->start(), node->count(), alloc);
+      deallocate(LeafSize(node->max_count()), node, alloc);
+      ++pos;
+    } while (pos <= parent->finish());
+
+    // Once we've deleted all children of parent, delete parent and go up/right.
+    assert(pos > parent->finish());
+    do {
+      node = parent;
+      pos = node->position();
+      parent = node->parent();
+      node->value_destroy_n(node->start(), node->count(), alloc);
+      deallocate(InternalSize(), node, alloc);
+      if (parent == delete_root_parent) return;
+      ++pos;
+    } while (pos > parent->finish());
+  }
 }
 
 ////
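
clear_and_delete above replaces the recursive internal_clear() (removed near the end of this diff) with an iterative traversal: descend to the leftmost leaf, delete leaves left to right, and delete each internal node once all of its children are gone. The general shape of that pattern on a plain n-ary tree with parent pointers and stored positions, as a sketch under those assumptions rather than the btree code:

    #include <vector>

    struct Node {
      Node *parent = nullptr;
      int position = 0;  // index of this node within parent->children
      std::vector<Node *> children;
    };

    // Deletes `node` and everything below it, children before parents, without
    // recursion or an explicit stack. The caller is responsible for removing the
    // now-dangling pointer from the parent's children, much as remove_values()
    // fixes up the parent node in the btree.
    void DeleteSubtree(Node *node) {
      Node *stop = node->parent;
      while (!node->children.empty()) node = node->children.front();
      for (;;) {
        Node *parent = node->parent;
        const int pos = node->position;
        delete node;
        if (parent == stop) return;
        if (pos + 1 < static_cast<int>(parent->children.size())) {
          // Step to the right sibling and descend to its leftmost leaf.
          node = parent->children[pos + 1];
          while (!node->children.empty()) node = node->children.front();
        } else {
          // All of parent's children are gone; parent is deleted next.
          node = parent;
        }
      }
    }
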
@@ -1794,7 +1840,7 @@ void btree_iterator<N, R, P>::decrement_slow() {
 // btree methods
 template <typename P>
 template <typename Btree>
-void btree<P>::copy_or_move_values_in_order(Btree *other) {
+void btree<P>::copy_or_move_values_in_order(Btree &other) {
   static_assert(std::is_same<btree, Btree>::value ||
                     std::is_same<const btree, Btree>::value,
                 "Btree type must be same or const.");
@@ -1802,11 +1848,11 @@ void btree<P>::copy_or_move_values_in_order(Btree *other) {
 
   // We can avoid key comparisons because we know the order of the
   // values is the same order we'll store them in.
-  auto iter = other->begin();
-  if (iter == other->end()) return;
+  auto iter = other.begin();
+  if (iter == other.end()) return;
   insert_multi(maybe_move_from_iterator(iter));
   ++iter;
-  for (; iter != other->end(); ++iter) {
+  for (; iter != other.end(); ++iter) {
     // If the btree is not empty, we can just insert the new value at the end
     // of the tree.
     internal_emplace(end(), maybe_move_from_iterator(iter));
@@ -1845,25 +1891,52 @@ constexpr bool btree<P>::static_assert_validation() {
 }
 
 template <typename P>
-btree<P>::btree(const key_compare &comp, const allocator_type &alloc)
-    : root_(comp, alloc, EmptyNode()), rightmost_(EmptyNode()), size_(0) {}
-
-template <typename P>
-btree<P>::btree(const btree &other)
-    : btree(other.key_comp(), other.allocator()) {
-  copy_or_move_values_in_order(&other);
+template <typename K>
+auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
+  const SearchResult<iterator, is_key_compare_to::value> res =
+      internal_lower_bound(key);
+  const iterator lower = internal_end(res.value);
+  if (res.HasMatch() ? !res.IsEq()
+                     : lower == end() || compare_keys(key, lower.key())) {
+    return {lower, lower};
+  }
+
+  const iterator next = std::next(lower);
+  // When the comparator is heterogeneous, we can't assume that comparison with
+  // non-`key_type` will be equivalent to `key_type` comparisons so there
+  // could be multiple equivalent keys even in a unique-container. But for
+  // heterogeneous comparisons from the default string adapted comparators, we
+  // don't need to worry about this.
+  if (!is_multi_container::value &&
+      (std::is_same<K, key_type>::value || is_key_compare_adapted::value)) {
+    // The next iterator after lower must point to a key greater than `key`.
+    // Note: if this assert fails, then it may indicate that the comparator does
+    // not meet the equivalence requirements for Compare
+    // (see https://en.cppreference.com/w/cpp/named_req/Compare).
+    assert(next == end() || compare_keys(key, next.key()));
+    return {lower, next};
+  }
+  // Try once more to avoid the call to upper_bound() if there's only one
+  // equivalent key. This should prevent all calls to upper_bound() in cases of
+  // unique-containers with heterogeneous comparators in which all comparison
+  // operators have the same equivalence classes.
+  if (next == end() || compare_keys(key, next.key())) return {lower, next};
+
+  // In this case, we need to call upper_bound() to avoid worst case O(N)
+  // behavior if we were to iterate over equal keys.
+  return {lower, upper_bound(key)};
 }
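
For unique containers this equal_range() usually finishes without a second tree descent: when lower is not an exact match the range is empty, and otherwise the element after lower is almost always already greater than the key, so upper_bound() is only reached for genuinely duplicated heterogeneous lookups. A small usage sketch against the public wrapper (assumes absl::btree_map; not part of this diff):

    #include <cassert>
    #include <iterator>
    #include <string>
    #include "absl/container/btree_map.h"

    void EqualRangeExample() {
      absl::btree_map<std::string, int> m = {{"a", 1}, {"b", 2}};
      auto missing = m.equal_range("c");
      assert(missing.first == missing.second);  // empty range, no upper_bound() call
      auto hit = m.equal_range("b");
      assert(std::distance(hit.first, hit.second) == 1);  // unique container: 0 or 1
    }
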
 
 template <typename P>
-template <typename... Args>
-auto btree<P>::insert_unique(const key_type &key, Args &&... args)
+template <typename K, typename... Args>
+auto btree<P>::insert_unique(const K &key, Args &&... args)
     -> std::pair<iterator, bool> {
   if (empty()) {
     mutable_root() = rightmost_ = new_leaf_root_node(1);
   }
 
-  auto res = internal_locate(key);
-  iterator &iter = res.value;
+  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
+  iterator iter = res.value;
 
   if (res.HasMatch()) {
     if (res.IsEq()) {
@@ -1881,8 +1954,8 @@ auto btree<P>::insert_unique(const key_type &key, Args &&... args)
 }
 
 template <typename P>
-template <typename... Args>
-inline auto btree<P>::insert_hint_unique(iterator position, const key_type &key,
+template <typename K, typename... Args>
+inline auto btree<P>::insert_hint_unique(iterator position, const K &key,
                                          Args &&... args)
     -> std::pair<iterator, bool> {
   if (!empty()) {
@@ -1906,14 +1979,23 @@ inline auto btree<P>::insert_hint_unique(iterator position, const key_type &key,
 }
 
 template <typename P>
-template <typename InputIterator>
-void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e) {
+template <typename InputIterator, typename>
+void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int) {
   for (; b != e; ++b) {
     insert_hint_unique(end(), params_type::key(*b), *b);
   }
 }
 
 template <typename P>
+template <typename InputIterator>
+void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char) {
+  for (; b != e; ++b) {
+    init_type value(*b);
+    insert_hint_unique(end(), params_type::key(value), std::move(value));
+  }
+}
+
+template <typename P>
 template <typename ValueType>
 auto btree<P>::insert_multi(const key_type &key, ValueType &&v) -> iterator {
   if (empty()) {
@@ -1968,7 +2050,7 @@ auto btree<P>::operator=(const btree &other) -> btree & {
       *mutable_allocator() = other.allocator();
     }
 
-    copy_or_move_values_in_order(&other);
+    copy_or_move_values_in_order(other);
   }
   return *this;
 }
@@ -1998,7 +2080,7 @@ auto btree<P>::operator=(btree &&other) noexcept -> btree & {
         // comparator while moving the values so we can't swap the key
         // comparators.
         *mutable_key_comp() = other.key_comp();
-        copy_or_move_values_in_order(&other);
+        copy_or_move_values_in_order(other);
       }
     }
   }
@@ -2010,7 +2092,7 @@ auto btree<P>::erase(iterator iter) -> iterator {
   bool internal_delete = false;
   if (!iter.node->leaf()) {
     // Deletion of a value on an internal node. First, move the largest value
-    // from our left child here, then delete that position (in remove_value()
+    // from our left child here, then delete that position (in remove_values()
     // below). We can get to the largest value from our left child by
     // decrementing iter.
     iterator internal_iter(iter);
@@ -2022,7 +2104,7 @@ auto btree<P>::erase(iterator iter) -> iterator {
   }
 
   // Delete the key from the leaf.
-  iter.node->remove_value(iter.position, mutable_allocator());
+  iter.node->remove_values(iter.position, /*to_erase=*/1, mutable_allocator());
   --size_;
 
   // We want to return the next value after the one we just erased. If we
@@ -2097,7 +2179,9 @@ auto btree<P>::erase_range(iterator begin, iterator end)
   }
 
   if (begin.node == end.node) {
-    erase_same_node(begin, end);
+    assert(end.position > begin.position);
+    begin.node->remove_values(begin.position, end.position - begin.position,
+                              mutable_allocator());
     size_ -= count;
     return {count, rebalance_after_delete(begin)};
   }
@@ -2107,8 +2191,11 @@ auto btree<P>::erase_range(iterator begin, iterator end)
     if (begin.node->leaf()) {
       const size_type remaining_to_erase = size_ - target_size;
       const size_type remaining_in_node = begin.node->finish() - begin.position;
-      begin = erase_from_leaf_node(
-          begin, (std::min)(remaining_to_erase, remaining_in_node));
+      const size_type to_erase =
+          (std::min)(remaining_to_erase, remaining_in_node);
+      begin.node->remove_values(begin.position, to_erase, mutable_allocator());
+      size_ -= to_erase;
+      begin = rebalance_after_delete(begin);
     } else {
       begin = erase(begin);
     }
@@ -2117,79 +2204,9 @@ auto btree<P>::erase_range(iterator begin, iterator end)
 }
 
 template <typename P>
-void btree<P>::erase_same_node(iterator begin, iterator end) {
-  assert(begin.node == end.node);
-  assert(end.position > begin.position);
-
-  node_type *node = begin.node;
-  size_type to_erase = end.position - begin.position;
-  if (!node->leaf()) {
-    // Delete all children between begin and end.
-    for (size_type i = 0; i < to_erase; ++i) {
-      internal_clear(node->child(begin.position + i + 1));
-    }
-    // Rotate children after end into new positions.
-    for (size_type i = begin.position + to_erase + 1; i <= node->finish();
-         ++i) {
-      node->set_child(i - to_erase, node->child(i));
-      node->clear_child(i);
-    }
-  }
-  node->remove_values_ignore_children(begin.position, to_erase,
-                                      mutable_allocator());
-
-  // Do not need to update rightmost_, because
-  // * either end == this->end(), and therefore node == rightmost_, and still
-  //   exists
-  // * or end != this->end(), and therefore rightmost_ hasn't been erased, since
-  //   it wasn't covered in [begin, end)
-}
-
-template <typename P>
-auto btree<P>::erase_from_leaf_node(iterator begin, size_type to_erase)
-    -> iterator {
-  node_type *node = begin.node;
-  assert(node->leaf());
-  assert(node->finish() > begin.position);
-  assert(begin.position + to_erase <= node->finish());
-
-  node->remove_values_ignore_children(begin.position, to_erase,
-                                      mutable_allocator());
-
-  size_ -= to_erase;
-
-  return rebalance_after_delete(begin);
-}
-
-template <typename P>
-template <typename K>
-auto btree<P>::erase_unique(const K &key) -> size_type {
-  const iterator iter = internal_find(key);
-  if (iter.node == nullptr) {
-    // The key doesn't exist in the tree, return nothing done.
-    return 0;
-  }
-  erase(iter);
-  return 1;
-}
-
-template <typename P>
-template <typename K>
-auto btree<P>::erase_multi(const K &key) -> size_type {
-  const iterator begin = internal_lower_bound(key);
-  if (begin.node == nullptr) {
-    // The key doesn't exist in the tree, return nothing done.
-    return 0;
-  }
-  // Delete all of the keys between begin and upper_bound(key).
-  const iterator end = internal_end(internal_upper_bound(key));
-  return erase_range(begin, end).first;
-}
-
-template <typename P>
 void btree<P>::clear() {
   if (!empty()) {
-    internal_clear(root());
+    node_type::clear_and_delete(root(), mutable_allocator());
   }
   mutable_root() = EmptyNode();
   rightmost_ = EmptyNode();
@@ -2244,11 +2261,11 @@ void btree<P>::rebalance_or_split(iterator *iter) {
         // inserting at the end of the right node then we bias rebalancing to
         // fill up the left node.
         int to_move = (kNodeValues - left->count()) /
-                      (1 + (insert_position < kNodeValues));
+                      (1 + (insert_position < static_cast<int>(kNodeValues)));
         to_move = (std::max)(1, to_move);
 
         if (insert_position - to_move >= node->start() ||
-            left->count() + to_move < kNodeValues) {
+            left->count() + to_move < static_cast<int>(kNodeValues)) {
           left->rebalance_right_to_left(to_move, node, mutable_allocator());
 
           assert(node->max_count() - node->count() == to_move);
@@ -2272,12 +2289,12 @@ void btree<P>::rebalance_or_split(iterator *iter) {
         // We bias rebalancing based on the position being inserted. If we're
         // inserting at the beginning of the left node then we bias rebalancing
         // to fill up the right node.
-        int to_move = (kNodeValues - right->count()) /
+        int to_move = (static_cast<int>(kNodeValues) - right->count()) /
                       (1 + (insert_position > node->start()));
         to_move = (std::max)(1, to_move);
 
         if (insert_position <= node->finish() - to_move ||
-            right->count() + to_move < kNodeValues) {
+            right->count() + to_move < static_cast<int>(kNodeValues)) {
           node->rebalance_left_to_right(to_move, right, mutable_allocator());
 
           if (insert_position > node->finish()) {
@@ -2330,12 +2347,7 @@ void btree<P>::rebalance_or_split(iterator *iter) {
 template <typename P>
 void btree<P>::merge_nodes(node_type *left, node_type *right) {
   left->merge(right, mutable_allocator());
-  if (right->leaf()) {
-    if (rightmost_ == right) rightmost_ = left;
-    delete_leaf_node(right);
-  } else {
-    delete_internal_node(right);
-  }
+  if (rightmost_ == right) rightmost_ = left;
 }
 
 template <typename P>
@@ -2345,7 +2357,7 @@ bool btree<P>::try_merge_or_rebalance(iterator *iter) {
     // Try merging with our left sibling.
     node_type *left = parent->child(iter->node->position() - 1);
     assert(left->max_count() == kNodeValues);
-    if (1 + left->count() + iter->node->count() <= kNodeValues) {
+    if (1U + left->count() + iter->node->count() <= kNodeValues) {
       iter->position += 1 + left->count();
       merge_nodes(left, iter->node);
       iter->node = left;
@@ -2356,7 +2368,7 @@ bool btree<P>::try_merge_or_rebalance(iterator *iter) {
     // Try merging with our right sibling.
     node_type *right = parent->child(iter->node->position() + 1);
     assert(right->max_count() == kNodeValues);
-    if (1 + iter->node->count() + right->count() <= kNodeValues) {
+    if (1U + iter->node->count() + right->count() <= kNodeValues) {
       merge_nodes(iter->node, right);
       return true;
     }
@@ -2392,20 +2404,20 @@ bool btree<P>::try_merge_or_rebalance(iterator *iter) {
 
 template <typename P>
 void btree<P>::try_shrink() {
-  if (root()->count() > 0) {
+  node_type *orig_root = root();
+  if (orig_root->count() > 0) {
     return;
   }
   // Deleted the last item on the root node, shrink the height of the tree.
-  if (root()->leaf()) {
+  if (orig_root->leaf()) {
     assert(size() == 0);
-    delete_leaf_node(root());
     mutable_root() = rightmost_ = EmptyNode();
   } else {
-    node_type *child = root()->start_child();
+    node_type *child = orig_root->start_child();
     child->make_root();
-    delete_internal_node(root());
     mutable_root() = child;
   }
+  node_type::clear_and_delete(orig_root, mutable_allocator());
 }
 
 template <typename P>
@@ -2433,7 +2445,7 @@ inline auto btree<P>::internal_emplace(iterator iter, Args &&... args)
     --iter;
     ++iter.position;
   }
-  const int max_count = iter.node->max_count();
+  const field_type max_count = iter.node->max_count();
   allocator_type *alloc = mutable_allocator();
   if (iter.node->count() == max_count) {
     // Make room in the leaf for the new item.
@@ -2450,7 +2462,7 @@ inline auto btree<P>::internal_emplace(iterator iter, Args &&... args)
                            old_root->start(), old_root, alloc);
       new_root->set_finish(old_root->finish());
       old_root->set_finish(old_root->start());
-      delete_leaf_node(old_root);
+      node_type::clear_and_delete(old_root, alloc);
       mutable_root() = rightmost_ = new_root;
     } else {
       rebalance_or_split(&iter);
@@ -2465,61 +2477,48 @@ template <typename P>
 template <typename K>
 inline auto btree<P>::internal_locate(const K &key) const
     -> SearchResult<iterator, is_key_compare_to::value> {
-  return internal_locate_impl(key, is_key_compare_to());
-}
-
-template <typename P>
-template <typename K>
-inline auto btree<P>::internal_locate_impl(
-    const K &key, std::false_type /* IsCompareTo */) const
-    -> SearchResult<iterator, false> {
   iterator iter(const_cast<node_type *>(root()));
   for (;;) {
-    iter.position = iter.node->lower_bound(key, key_comp()).value;
-    // NOTE: we don't need to walk all the way down the tree if the keys are
-    // equal, but determining equality would require doing an extra comparison
-    // on each node on the way down, and we will need to go all the way to the
-    // leaf node in the expected case.
-    if (iter.node->leaf()) {
-      break;
-    }
-    iter.node = iter.node->child(iter.position);
-  }
-  return {iter};
-}
-
-template <typename P>
-template <typename K>
-inline auto btree<P>::internal_locate_impl(
-    const K &key, std::true_type /* IsCompareTo */) const
-    -> SearchResult<iterator, true> {
-  iterator iter(const_cast<node_type *>(root()));
-  for (;;) {
-    SearchResult<int, true> res = iter.node->lower_bound(key, key_comp());
+    SearchResult<int, is_key_compare_to::value> res =
+        iter.node->lower_bound(key, key_comp());
     iter.position = res.value;
-    if (res.match == MatchKind::kEq) {
+    if (res.IsEq()) {
       return {iter, MatchKind::kEq};
     }
+    // Note: in the non-key-compare-to case, we don't need to walk all the way
+    // down the tree if the keys are equal, but determining equality would
+    // require doing an extra comparison on each node on the way down, and we
+    // will need to go all the way to the leaf node in the expected case.
     if (iter.node->leaf()) {
       break;
     }
     iter.node = iter.node->child(iter.position);
   }
+  // Note: in the non-key-compare-to case, the key may actually be equivalent
+  // here (and the MatchKind::kNe is ignored).
   return {iter, MatchKind::kNe};
 }
 
 template <typename P>
 template <typename K>
-auto btree<P>::internal_lower_bound(const K &key) const -> iterator {
+auto btree<P>::internal_lower_bound(const K &key) const
+    -> SearchResult<iterator, is_key_compare_to::value> {
   iterator iter(const_cast<node_type *>(root()));
+  SearchResult<int, is_key_compare_to::value> res;
+  bool seen_eq = false;
   for (;;) {
-    iter.position = iter.node->lower_bound(key, key_comp()).value;
+    res = iter.node->lower_bound(key, key_comp());
+    iter.position = res.value;
+    // TODO(ezb): we should be able to terminate early on IsEq() if there can't
+    // be multiple equivalent keys in container for this lookup type.
     if (iter.node->leaf()) {
       break;
     }
+    seen_eq = seen_eq || res.IsEq();
     iter.node = iter.node->child(iter.position);
   }
-  return internal_last(iter);
+  if (res.IsEq()) return {iter, MatchKind::kEq};
+  return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
 }
 
 template <typename P>
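
internal_lower_bound() above now returns a SearchResult and records whether any equality was seen on the way down, which is what lets equal_range() skip its confirming comparison. The underlying idea transfers to any three-way search; as a self-contained analogue on a flat sorted vector (not the btree code):

    #include <cstddef>
    #include <vector>

    struct LowerBoundResult {
      size_t pos;
      bool exact;  // true iff an element equal to `key` was compared during the search
    };

    // `cmp(a, b)` returns negative/zero/positive, like a compare-to functor.
    template <typename T, typename CompareTo>
    LowerBoundResult LowerBound3Way(const std::vector<T> &v, const T &key,
                                    CompareTo cmp) {
      size_t lo = 0, hi = v.size();
      bool exact = false;
      while (lo < hi) {
        const size_t mid = lo + (hi - lo) / 2;
        const int c = cmp(v[mid], key);
        if (c < 0) {
          lo = mid + 1;
        } else {
          if (c == 0) exact = true;  // remember the match; no second pass needed
          hi = mid;
        }
      }
      return {lo, exact};  // if key is present, some equal element was probed
    }
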
@@ -2539,7 +2538,7 @@ auto btree<P>::internal_upper_bound(const K &key) const -> iterator {
 template <typename P>
 template <typename K>
 auto btree<P>::internal_find(const K &key) const -> iterator {
-  auto res = internal_locate(key);
+  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
   if (res.HasMatch()) {
     if (res.IsEq()) {
       return res.value;
@@ -2554,18 +2553,6 @@ auto btree<P>::internal_find(const K &key) const -> iterator {
 }
 
 template <typename P>
-void btree<P>::internal_clear(node_type *node) {
-  if (!node->leaf()) {
-    for (int i = node->start(); i <= node->finish(); ++i) {
-      internal_clear(node->child(i));
-    }
-    delete_internal_node(node);
-  } else {
-    delete_leaf_node(node);
-  }
-}
-
-template <typename P>
 int btree<P>::internal_verify(const node_type *node, const key_type *lo,
                               const key_type *hi) const {
   assert(node->count() > 0);