author    Abseil Team <absl-team@google.com>    2020-03-31T19:32 -0700
committer Andy Getz <durandal@google.com>       2020-03-31T20:34 -0400
commit    fba8a316c30690097de5d6127ad307d84a1b74ca (patch)
tree      952cde69de3f0bca33453bf166514711e3abd5e0 /absl/strings/cord.cc
parent    79e0dc11514df035a8d07a356f9ee1800fb2160c (diff)
Export of internal Abseil changes
--
2dd5008c7b4176859e320c7c337078adb173b662 by Tom Manshreck <shreck@google.com>:

Internal change

PiperOrigin-RevId: 304022549

--
6442abd78697b03cfe698b0d0dac7f1eb4b5cb38 by Andy Getzendanner <durandal@google.com>:

Internal change

PiperOrigin-RevId: 303890410

--
eb8b37b468b0f23da09d3de714272928ef61f942 by Gennadiy Rozental <rogeeff@google.com>:

Roll changes forward with ChunkIterator templatized.

This should facilitate using a "small" chunk iterator for regular usage and a proper "big" iterator internally in the Cord implementation. This way Cord users are not exposed to stack-size overhead when they hold many chunk iterators or use a recursive implementation that relies on chunk iterators.
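
To make the idea concrete, here is a minimal sketch of such a templatization. The helper names (NodeStub, FixedPath, PublicChunkIterator) are hypothetical; only the GenericChunkIterator<StorageType> shape mirrors the diff below.

    #include <cstddef>
    #include <vector>

    struct NodeStub {};  // stand-in for the real tree node type

    // Small fixed-capacity stack; no bounds checking in this sketch.
    template <typename T, std::size_t kCapacity>
    class FixedPath {
     public:
      void push_back(T v) { items_[size_++] = v; }
      void pop_back() { --size_; }
      T& back() { return items_[size_ - 1]; }
      bool empty() const { return size_ == 0; }

     private:
      T items_[kCapacity] = {};
      std::size_t size_ = 0;
    };

    // The iterator's pending-subtree stack is a template parameter, so its
    // footprint is chosen by the alias that instantiates it.
    template <typename StorageType>
    class GenericChunkIterator {
      StorageType pending_subtrees_;
      // ... chunk position, operator++, etc. ...
    };

    // Internal, recursion-heavy code gets a deep on-stack path; the
    // user-facing iterator stays small by using heap-backed storage.
    using InternalChunkIterator = GenericChunkIterator<FixedPath<NodeStub*, 96>>;
    using PublicChunkIterator = GenericChunkIterator<std::vector<NodeStub*>>;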

PiperOrigin-RevId: 303877118

--
9623c569e7c55b45254e95f2d14c5badf9c901aa by Gennadiy Rozental <rogeeff@google.com>:

Switch Flags implementation of fast type id to use absl/base/internal/fast_type_id.h

PiperOrigin-RevId: 303861019

--
e2931e8d53c86d0816da6bbc8ba58cf5a3a443bb by Matthew Brown <matthewbr@google.com>:

Internal Change

PiperOrigin-RevId: 303832407

--
b549ed6e441e920b8ad6f02a80b9fd543820ef86 by Tom Manshreck <shreck@google.com>:

Update Cord header file comments to Abseil standards

PiperOrigin-RevId: 303823232

--
fc633d4f31a2d058f2b6a7029fc7c9820cd71c92 by Evan Brown <ezb@google.com>:

Remove top-level const from K/V in map_slot_type::mutable_value and map_slot_type::key.

This allows us to move between `map_slot_type::mutable_value`s internally even when the key_type and/or mapped_type specified by the user are const.
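
A loose sketch of the motivation, assuming a simplified slot layout (the map_slot_type below is illustrative, not the real container internals):

    #include <string>
    #include <type_traits>
    #include <utility>

    // Sketch of a slot: a user-facing pair whose key is const, plus a
    // "mutable" view of the same storage with top-level const stripped.
    template <class K, class V>
    union map_slot_type {
      map_slot_type() {}
      ~map_slot_type() {}
      std::pair<const K, V> value;  // what users observe
      std::pair<std::remove_const_t<K>, std::remove_const_t<V>> mutable_value;
      std::remove_const_t<K> key;
    };

    // Even if the user asked for a const mapped_type, moving between slots
    // via mutable_value compiles, because the top-level const is removed
    // there (this sketch assumes mutable_value is the active member).
    void MoveSlot(map_slot_type<std::string, const int>& dst,
                  map_slot_type<std::string, const int>& src) {
      dst.mutable_value = std::move(src.mutable_value);
    }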

PiperOrigin-RevId: 303811694

--
909b3ce7cb3583ee9c374d36ff5f82bba02a1b64 by Derek Mauro <dmauro@google.com>:

Add hardening assertions to the preconditions of absl::Cord
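
Unlike plain assert(), a hardening assertion can stay active in optimized builds. A rough sketch of the mechanism, using a hypothetical MY_HARDENING_ASSERT rather than the real ABSL_HARDENING_ASSERT pulled in via the absl/base/macros.h include in the diff below:

    #include <cassert>
    #include <cstdlib>

    // MY_HARDENED_BUILD and MY_HARDENING_ASSERT are hypothetical stand-ins.
    #ifdef MY_HARDENED_BUILD
    #define MY_HARDENING_ASSERT(expr) \
      (static_cast<bool>(expr) ? static_cast<void>(0) : std::abort())
    #else
    #define MY_HARDENING_ASSERT(expr) assert(expr)
    #endif

    // Usage mirroring the change to Cord::operator[] in the diff below:
    //   MY_HARDENING_ASSERT(i < size());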

PiperOrigin-RevId: 303419537

--
9d32f79eabd54e6cb17bcc28b53e9bcfeb3cf6f4 by Greg Falcon <gfalcon@google.com>:

Don't use MSVC-specific bit manipulations when using Clang on Windows.

This fixes a compiler warning.  Note that we do not have continuous testing for this configuration; this CL is best-effort support.
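
The usual shape of such a fix is to select the intrinsic by compiler rather than by platform, so clang-cl on Windows takes the builtin path. The snippet below is illustrative only; CountTrailingZeros64 is a hypothetical example, not the function touched by this CL.

    #include <cstdint>

    #if defined(_MSC_VER) && !defined(__clang__)
    #include <intrin.h>
    #endif

    // Precondition: x != 0. 64-bit targets assumed for the MSVC path.
    inline int CountTrailingZeros64(uint64_t x) {
    #if defined(_MSC_VER) && !defined(__clang__)
      unsigned long index;
      _BitScanForward64(&index, x);
      return static_cast<int>(index);
    #else
      return __builtin_ctzll(x);
    #endif
    }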

PiperOrigin-RevId: 303322582

--
f6e0a35a2b9081d2a9eef73789b7bc1b5e46e5ad by Gennadiy Rozental <rogeeff@google.com>:

Introduce standalone FastTypeId utility to represent a compile-time unique type id.
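
A hedged sketch of the underlying technique (not necessarily the exact contents of fast_type_id.h): each type instantiates its own static tag object, and that object's address serves as the id.

    // No RTTI and no registration; works across translation units because
    // the static data member is implicitly inline in C++17.
    using FastTypeIdType = const void*;

    template <typename T>
    struct TypeTag {
      static constexpr char kDummy = 0;
    };

    template <typename T>
    constexpr FastTypeIdType FastTypeId() {
      return &TypeTag<T>::kDummy;
    }

    // Example: FastTypeId<int>() != FastTypeId<long>() is always true.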

PiperOrigin-RevId: 303180545

--
99120e9fbdb5b2d327139ab8f617533d7bc3345b by Abseil Team <absl-team@google.com>:

Changed absl's import of std::string_view to `using string_view = std::string_view`.
This should help tools (e.g. include-what-you-use) discover where absl::string_view is defined.
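
For context, the difference is between a using-declaration and an alias-declaration inside namespace absl. The snippet is simplified; the real header only takes this path when absl::string_view aliases the standard type.

    #include <string_view>

    namespace absl {

    // Before (using-declaration): the name is imported, but tools tend to
    // attribute absl::string_view to <string_view> rather than this header.
    //   using std::string_view;

    // After (alias-declaration): absl::string_view is declared right here,
    // so include-what-you-use can point at the Abseil header.
    using string_view = std::string_view;

    }  // namespace absl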

PiperOrigin-RevId: 303169095
GitOrigin-RevId: 2dd5008c7b4176859e320c7c337078adb173b662
Change-Id: I1e18ae08e23686ac963e7ea5e5bd499e18d51048
Diffstat (limited to 'absl/strings/cord.cc')
-rw-r--r--  absl/strings/cord.cc  291
1 file changed, 152 insertions(+), 139 deletions(-)
diff --git a/absl/strings/cord.cc b/absl/strings/cord.cc
index 4f64f79965c7..7de7766c5116 100644
--- a/absl/strings/cord.cc
+++ b/absl/strings/cord.cc
@@ -28,9 +28,9 @@
 
 #include "absl/base/casts.h"
 #include "absl/base/internal/raw_logging.h"
+#include "absl/base/macros.h"
 #include "absl/base/port.h"
 #include "absl/container/fixed_array.h"
-#include "absl/container/inlined_vector.h"
 #include "absl/strings/escaping.h"
 #include "absl/strings/internal/cord_internal.h"
 #include "absl/strings/internal/resize_uninitialized.h"
@@ -132,6 +132,14 @@ inline const CordRepExternal* CordRep::external() const {
   return static_cast<const CordRepExternal*>(this);
 }
 
+using CordTreeConstPath = CordTreePath<const CordRep*, MaxCordDepth()>;
+
+// This type is used to store the list of pending nodes during re-balancing.
+// Its maximum size is 2 * MaxCordDepth() because the tree has a maximum
+// possible depth of MaxCordDepth() and every concat node along a tree path
+// could theoretically be split during rebalancing.
+using RebalancingStack = CordTreePath<CordRep*, 2 * MaxCordDepth()>;
+
 }  // namespace cord_internal
 
 static const size_t kFlatOverhead = offsetof(CordRep, data);
@@ -180,98 +188,78 @@ static constexpr size_t TagToLength(uint8_t tag) {
 // Enforce that kMaxFlatSize maps to a well-known exact tag value.
 static_assert(TagToAllocatedSize(224) == kMaxFlatSize, "Bad tag logic");
 
-constexpr uint64_t Fibonacci(unsigned char n, uint64_t a = 0, uint64_t b = 1) {
-  return n == 0 ? a : Fibonacci(n - 1, b, a + b);
+constexpr size_t Fibonacci(uint8_t n, const size_t a = 0, const size_t b = 1) {
+  return n == 0
+             ? a
+             : n == 1 ? b
+                      : Fibonacci(n - 1, b,
+                                  (a > (size_t(-1) - b)) ? size_t(-1) : a + b);
 }
 
-static_assert(Fibonacci(63) == 6557470319842,
-              "Fibonacci values computed incorrectly");
-
 // Minimum length required for a given depth tree -- a tree is considered
 // balanced if
-//      length(t) >= min_length[depth(t)]
-// The root node depth is allowed to become twice as large to reduce rebalancing
-// for larger strings (see IsRootBalanced).
-static constexpr uint64_t min_length[] = {
-    Fibonacci(2),
-    Fibonacci(3),
-    Fibonacci(4),
-    Fibonacci(5),
-    Fibonacci(6),
-    Fibonacci(7),
-    Fibonacci(8),
-    Fibonacci(9),
-    Fibonacci(10),
-    Fibonacci(11),
-    Fibonacci(12),
-    Fibonacci(13),
-    Fibonacci(14),
-    Fibonacci(15),
-    Fibonacci(16),
-    Fibonacci(17),
-    Fibonacci(18),
-    Fibonacci(19),
-    Fibonacci(20),
-    Fibonacci(21),
-    Fibonacci(22),
-    Fibonacci(23),
-    Fibonacci(24),
-    Fibonacci(25),
-    Fibonacci(26),
-    Fibonacci(27),
-    Fibonacci(28),
-    Fibonacci(29),
-    Fibonacci(30),
-    Fibonacci(31),
-    Fibonacci(32),
-    Fibonacci(33),
-    Fibonacci(34),
-    Fibonacci(35),
-    Fibonacci(36),
-    Fibonacci(37),
-    Fibonacci(38),
-    Fibonacci(39),
-    Fibonacci(40),
-    Fibonacci(41),
-    Fibonacci(42),
-    Fibonacci(43),
-    Fibonacci(44),
-    Fibonacci(45),
-    Fibonacci(46),
-    Fibonacci(47),
-    0xffffffffffffffffull,  // Avoid overflow
-};
-
-static const int kMinLengthSize = ABSL_ARRAYSIZE(min_length);
-
-// The inlined size to use with absl::InlinedVector.
-//
-// Note: The InlinedVectors in this file (and in cord.h) do not need to use
-// the same value for their inlined size. The fact that they do is historical.
-// It may be desirable for each to use a different inlined size optimized for
-// that InlinedVector's usage.
-//
-// TODO(jgm): Benchmark to see if there's a more optimal value than 47 for
-// the inlined vector size (47 exists for backward compatibility).
-static const int kInlinedVectorSize = 47;
-
-static inline bool IsRootBalanced(CordRep* node) {
-  if (node->tag != CONCAT) {
-    return true;
-  } else if (node->concat()->depth() <= 15) {
-    return true;
-  } else if (node->concat()->depth() > kMinLengthSize) {
-    return false;
-  } else {
-    // Allow depth to become twice as large as implied by fibonacci rule to
-    // reduce rebalancing for larger strings.
-    return (node->length >= min_length[node->concat()->depth() / 2]);
-  }
+//      length(t) >= kMinLength[depth(t)]
+// The node depth is allowed to become larger to reduce rebalancing
+// for larger strings (see ShouldRebalance).
+constexpr size_t kMinLength[] = {
+    Fibonacci(2),  Fibonacci(3),  Fibonacci(4),  Fibonacci(5),  Fibonacci(6),
+    Fibonacci(7),  Fibonacci(8),  Fibonacci(9),  Fibonacci(10), Fibonacci(11),
+    Fibonacci(12), Fibonacci(13), Fibonacci(14), Fibonacci(15), Fibonacci(16),
+    Fibonacci(17), Fibonacci(18), Fibonacci(19), Fibonacci(20), Fibonacci(21),
+    Fibonacci(22), Fibonacci(23), Fibonacci(24), Fibonacci(25), Fibonacci(26),
+    Fibonacci(27), Fibonacci(28), Fibonacci(29), Fibonacci(30), Fibonacci(31),
+    Fibonacci(32), Fibonacci(33), Fibonacci(34), Fibonacci(35), Fibonacci(36),
+    Fibonacci(37), Fibonacci(38), Fibonacci(39), Fibonacci(40), Fibonacci(41),
+    Fibonacci(42), Fibonacci(43), Fibonacci(44), Fibonacci(45), Fibonacci(46),
+    Fibonacci(47), Fibonacci(48), Fibonacci(49), Fibonacci(50), Fibonacci(51),
+    Fibonacci(52), Fibonacci(53), Fibonacci(54), Fibonacci(55), Fibonacci(56),
+    Fibonacci(57), Fibonacci(58), Fibonacci(59), Fibonacci(60), Fibonacci(61),
+    Fibonacci(62), Fibonacci(63), Fibonacci(64), Fibonacci(65), Fibonacci(66),
+    Fibonacci(67), Fibonacci(68), Fibonacci(69), Fibonacci(70), Fibonacci(71),
+    Fibonacci(72), Fibonacci(73), Fibonacci(74), Fibonacci(75), Fibonacci(76),
+    Fibonacci(77), Fibonacci(78), Fibonacci(79), Fibonacci(80), Fibonacci(81),
+    Fibonacci(82), Fibonacci(83), Fibonacci(84), Fibonacci(85), Fibonacci(86),
+    Fibonacci(87), Fibonacci(88), Fibonacci(89), Fibonacci(90), Fibonacci(91),
+    Fibonacci(92), Fibonacci(93), Fibonacci(94), Fibonacci(95)};
+
+static_assert(sizeof(kMinLength) / sizeof(size_t) >=
+                  (cord_internal::MaxCordDepth() + 1),
+              "Not enough elements in kMinLength array to cover all the "
+              "supported Cord depth(s)");
+
+inline bool ShouldRebalance(const CordRep* node) {
+  if (node->tag != CONCAT) return false;
+
+  size_t node_depth = node->concat()->depth();
+
+  if (node_depth <= 15) return false;
+
+  // Rebalancing Cords is expensive, so we reduce how often rebalancing occurs
+  // by allowing shallow Cords to have twice the depth that the Fibonacci rule
+  // would otherwise imply. Deep Cords, however, need to follow the rule more
+  // closely to ensure algorithm correctness. We implement this with linear
+  // interpolation. Cords of depth 16 are treated as though they have a depth
+  // of 16 * 1/2, and Cords of depth MaxCordDepth() interpolate to
+  // MaxCordDepth() * 1.
+  return node->length <
+         kMinLength[(node_depth * (cord_internal::MaxCordDepth() - 16)) /
+                    (2 * cord_internal::MaxCordDepth() - 16 - node_depth)];
+}
+
+// Unlike the root-balancing condition above, this check is part of the
+// rebalancing algorithm itself and must always be evaluated against the
+// exact depth for the algorithm to be correct.
+inline bool IsNodeBalanced(const CordRep* node) {
+  if (node->tag != CONCAT) return true;
+
+  size_t node_depth = node->concat()->depth();
+
+  return node->length >= kMinLength[node_depth];
 }
 
 static CordRep* Rebalance(CordRep* node);
-static void DumpNode(CordRep* rep, bool include_data, std::ostream* os);
-static bool VerifyNode(CordRep* root, CordRep* start_node,
+static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os);
+static bool VerifyNode(const CordRep* root, const CordRep* start_node,
                        bool full_validation);
 
 static inline CordRep* VerifyTree(CordRep* node) {
@@ -318,7 +306,8 @@ __attribute__((preserve_most))
 static void UnrefInternal(CordRep* rep) {
   assert(rep != nullptr);
 
-  absl::InlinedVector<CordRep*, kInlinedVectorSize> pending;
+  cord_internal::RebalancingStack pending;
+
   while (true) {
     if (rep->tag == CONCAT) {
       CordRepConcat* rep_concat = rep->concat();
@@ -400,6 +389,11 @@ static void SetConcatChildren(CordRepConcat* concat, CordRep* left,
 
   concat->length = left->length + right->length;
   concat->set_depth(1 + std::max(Depth(left), Depth(right)));
+
+  ABSL_INTERNAL_CHECK(concat->depth() <= cord_internal::MaxCordDepth(),
+                      "Cord depth exceeds max");
+  ABSL_INTERNAL_CHECK(concat->length >= left->length, "Cord is too long");
+  ABSL_INTERNAL_CHECK(concat->length >= right->length, "Cord is too long");
 }
 
 // Create a concatenation of the specified nodes.
@@ -425,7 +419,7 @@ static CordRep* RawConcat(CordRep* left, CordRep* right) {
 
 static CordRep* Concat(CordRep* left, CordRep* right) {
   CordRep* rep = RawConcat(left, right);
-  if (rep != nullptr && !IsRootBalanced(rep)) {
+  if (rep != nullptr && ShouldRebalance(rep)) {
     rep = Rebalance(rep);
   }
   return VerifyTree(rep);
@@ -720,6 +714,14 @@ void Cord::InlineRep::ClearSlow() {
   memset(data_, 0, sizeof(data_));
 }
 
+inline Cord::InternalChunkIterator Cord::internal_chunk_begin() const {
+  return InternalChunkIterator(this);
+}
+
+inline Cord::InternalChunkRange Cord::InternalChunks() const {
+  return InternalChunkRange(this);
+}
+
 // --------------------------------------------------------------------
 // Constructors and destructors
 
@@ -916,7 +918,7 @@ void Cord::Prepend(absl::string_view src) {
 static CordRep* RemovePrefixFrom(CordRep* node, size_t n) {
   if (n >= node->length) return nullptr;
   if (n == 0) return Ref(node);
-  absl::InlinedVector<CordRep*, kInlinedVectorSize> rhs_stack;
+  cord_internal::CordTreeMutablePath rhs_stack;
 
   while (node->tag == CONCAT) {
     assert(n <= node->length);
@@ -957,7 +959,7 @@ static CordRep* RemovePrefixFrom(CordRep* node, size_t n) {
 static CordRep* RemoveSuffixFrom(CordRep* node, size_t n) {
   if (n >= node->length) return nullptr;
   if (n == 0) return Ref(node);
-  absl::InlinedVector<CordRep*, kInlinedVectorSize> lhs_stack;
+  absl::cord_internal::CordTreeMutablePath lhs_stack;
   bool inplace_ok = node->refcount.IsOne();
 
   while (node->tag == CONCAT) {
@@ -1028,6 +1030,7 @@ void Cord::RemoveSuffix(size_t n) {
 
 // Work item for NewSubRange().
 struct SubRange {
+  SubRange() = default;
   SubRange(CordRep* a_node, size_t a_pos, size_t a_n)
       : node(a_node), pos(a_pos), n(a_n) {}
   CordRep* node;  // nullptr means concat last 2 results.
@@ -1036,8 +1039,11 @@ struct SubRange {
 };
 
 static CordRep* NewSubRange(CordRep* node, size_t pos, size_t n) {
-  absl::InlinedVector<CordRep*, kInlinedVectorSize> results;
-  absl::InlinedVector<SubRange, kInlinedVectorSize> todo;
+  cord_internal::CordTreeMutablePath results;
+  // In the worst case, the algorithm below adds up to 3 nodes to the `todo`
+  // list per iteration, but it also pops one on every cycle. If the original
+  // tree has depth d, the todo list can grow up to 2*d in size.
+  cord_internal::CordTreePath<SubRange, 2 * cord_internal::MaxCordDepth()> todo;
   todo.push_back(SubRange(node, pos, n));
   do {
     const SubRange& sr = todo.back();
@@ -1074,7 +1080,7 @@ static CordRep* NewSubRange(CordRep* node, size_t pos, size_t n) {
     }
   } while (!todo.empty());
   assert(results.size() == 1);
-  return results[0];
+  return results.back();
 }
 
 Cord Cord::Subcord(size_t pos, size_t new_size) const {
@@ -1090,7 +1096,7 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const {
   } else if (new_size == 0) {
     // We want to return empty subcord, so nothing to do.
   } else if (new_size <= InlineRep::kMaxInline) {
-    Cord::ChunkIterator it = chunk_begin();
+    Cord::InternalChunkIterator it = internal_chunk_begin();
     it.AdvanceBytes(pos);
     char* dest = sub_cord.contents_.data_;
     size_t remaining_size = new_size;
@@ -1113,11 +1119,12 @@ Cord Cord::Subcord(size_t pos, size_t new_size) const {
 
 class CordForest {
  public:
-  explicit CordForest(size_t length)
-      : root_length_(length), trees_(kMinLengthSize, nullptr) {}
+  explicit CordForest(size_t length) : root_length_(length), trees_({}) {}
 
   void Build(CordRep* cord_root) {
-    std::vector<CordRep*> pending = {cord_root};
+    // We add up to two nodes to the `pending` list, but we also pop one, so
+    // the size of `pending` will never exceed `MaxCordDepth()`.
+    cord_internal::CordTreeMutablePath pending(cord_root);
 
     while (!pending.empty()) {
       CordRep* node = pending.back();
@@ -1129,21 +1136,20 @@ class CordForest {
       }
 
       CordRepConcat* concat_node = node->concat();
-      if (concat_node->depth() >= kMinLengthSize ||
-          concat_node->length < min_length[concat_node->depth()]) {
-        pending.push_back(concat_node->right);
-        pending.push_back(concat_node->left);
-
-        if (concat_node->refcount.IsOne()) {
-          concat_node->left = concat_freelist_;
-          concat_freelist_ = concat_node;
-        } else {
-          Ref(concat_node->right);
-          Ref(concat_node->left);
-          Unref(concat_node);
-        }
-      } else {
+      if (IsNodeBalanced(concat_node)) {
         AddNode(node);
+        continue;
+      }
+      pending.push_back(concat_node->right);
+      pending.push_back(concat_node->left);
+
+      if (concat_node->refcount.IsOne()) {
+        concat_node->left = concat_freelist_;
+        concat_freelist_ = concat_node;
+      } else {
+        Ref(concat_node->right);
+        Ref(concat_node->left);
+        Unref(concat_node);
       }
     }
   }
@@ -1175,7 +1181,7 @@ class CordForest {
 
     // Collect together everything with which we will merge with node
     int i = 0;
-    for (; node->length > min_length[i + 1]; ++i) {
+    for (; node->length >= kMinLength[i + 1]; ++i) {
       auto& tree_at_i = trees_[i];
 
       if (tree_at_i == nullptr) continue;
@@ -1186,7 +1192,7 @@ class CordForest {
     sum = AppendNode(node, sum);
 
     // Insert sum into appropriate place in the forest
-    for (; sum->length >= min_length[i]; ++i) {
+    for (; sum->length >= kMinLength[i]; ++i) {
       auto& tree_at_i = trees_[i];
       if (tree_at_i == nullptr) continue;
 
@@ -1194,7 +1200,7 @@ class CordForest {
       tree_at_i = nullptr;
     }
 
-    // min_length[0] == 1, which means sum->length >= min_length[0]
+    // kMinLength[0] == 1, which means sum->length >= kMinLength[0]
     assert(i > 0);
     trees_[i - 1] = sum;
   }
@@ -1227,9 +1233,7 @@ class CordForest {
   }
 
   size_t root_length_;
-
-  // use an inlined vector instead of a flat array to get bounds checking
-  absl::InlinedVector<CordRep*, kInlinedVectorSize> trees_;
+  std::array<cord_internal::CordRep*, cord_internal::MaxCordDepth()> trees_;
 
   // List of concat nodes we can re-use for Cord balancing.
   CordRepConcat* concat_freelist_ = nullptr;
@@ -1330,7 +1334,7 @@ inline absl::string_view Cord::InlineRep::FindFlatStartPiece() const {
 
 inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
                                  size_t size_to_compare) const {
-  auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) {
+  auto advance = [](Cord::InternalChunkIterator* it, absl::string_view* chunk) {
     if (!chunk->empty()) return true;
     ++*it;
     if (it->bytes_remaining_ == 0) return false;
@@ -1338,7 +1342,7 @@ inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
     return true;
   };
 
-  Cord::ChunkIterator lhs_it = chunk_begin();
+  Cord::InternalChunkIterator lhs_it = internal_chunk_begin();
 
   // compared_size is inside first chunk.
   absl::string_view lhs_chunk =
@@ -1360,7 +1364,7 @@ inline int Cord::CompareSlowPath(absl::string_view rhs, size_t compared_size,
 
 inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size,
                                  size_t size_to_compare) const {
-  auto advance = [](Cord::ChunkIterator* it, absl::string_view* chunk) {
+  auto advance = [](Cord::InternalChunkIterator* it, absl::string_view* chunk) {
     if (!chunk->empty()) return true;
     ++*it;
     if (it->bytes_remaining_ == 0) return false;
@@ -1368,8 +1372,8 @@ inline int Cord::CompareSlowPath(const Cord& rhs, size_t compared_size,
     return true;
   };
 
-  Cord::ChunkIterator lhs_it = chunk_begin();
-  Cord::ChunkIterator rhs_it = rhs.chunk_begin();
+  Cord::InternalChunkIterator lhs_it = internal_chunk_begin();
+  Cord::InternalChunkIterator rhs_it = rhs.internal_chunk_begin();
 
   // compared_size is inside both first chunks.
   absl::string_view lhs_chunk =
@@ -1503,8 +1507,11 @@ void Cord::CopyToArraySlowPath(char* dst) const {
   }
 }
 
-Cord::ChunkIterator& Cord::ChunkIterator::operator++() {
-  assert(bytes_remaining_ > 0 && "Attempted to iterate past `end()`");
+template <typename StorageType>
+Cord::GenericChunkIterator<StorageType>&
+Cord::GenericChunkIterator<StorageType>::operator++() {
+  ABSL_HARDENING_ASSERT(bytes_remaining_ > 0 &&
+                        "Attempted to iterate past `end()`");
   assert(bytes_remaining_ >= current_chunk_.size());
   bytes_remaining_ -= current_chunk_.size();
 
@@ -1542,8 +1549,10 @@ Cord::ChunkIterator& Cord::ChunkIterator::operator++() {
   return *this;
 }
 
-Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
-  assert(bytes_remaining_ >= n && "Attempted to iterate past `end()`");
+template <typename StorageType>
+Cord Cord::GenericChunkIterator<StorageType>::AdvanceAndReadBytes(size_t n) {
+  ABSL_HARDENING_ASSERT(bytes_remaining_ >= n &&
+                        "Attempted to iterate past `end()`");
   Cord subcord;
 
   if (n <= InlineRep::kMaxInline) {
@@ -1655,7 +1664,8 @@ Cord Cord::ChunkIterator::AdvanceAndReadBytes(size_t n) {
   return subcord;
 }
 
-void Cord::ChunkIterator::AdvanceBytesSlowPath(size_t n) {
+template <typename StorageType>
+void Cord::GenericChunkIterator<StorageType>::AdvanceBytesSlowPath(size_t n) {
   assert(bytes_remaining_ >= n && "Attempted to iterate past `end()`");
   assert(n >= current_chunk_.size());  // This should only be called when
                                        // iterating to a new node.
@@ -1714,7 +1724,7 @@ void Cord::ChunkIterator::AdvanceBytesSlowPath(size_t n) {
 }
 
 char Cord::operator[](size_t i) const {
-  assert(i < size());
+  ABSL_HARDENING_ASSERT(i < size());
   size_t offset = i;
   const CordRep* rep = contents_.tree();
   if (rep == nullptr) {
@@ -1841,18 +1851,18 @@ absl::string_view Cord::FlattenSlowPath() {
   }
 }
 
-static void DumpNode(CordRep* rep, bool include_data, std::ostream* os) {
+static void DumpNode(const CordRep* rep, bool include_data, std::ostream* os) {
   const int kIndentStep = 1;
   int indent = 0;
-  absl::InlinedVector<CordRep*, kInlinedVectorSize> stack;
-  absl::InlinedVector<int, kInlinedVectorSize> indents;
+  cord_internal::CordTreeConstPath stack;
+  cord_internal::CordTreePath<int, cord_internal::MaxCordDepth()> indents;
   for (;;) {
     *os << std::setw(3) << rep->refcount.Get();
     *os << " " << std::setw(7) << rep->length;
     *os << " [";
-    if (include_data) *os << static_cast<void*>(rep);
+    if (include_data) *os << static_cast<const void*>(rep);
     *os << "]";
-    *os << " " << (IsRootBalanced(rep) ? 'b' : 'u');
+    *os << " " << (IsNodeBalanced(rep) ? 'b' : 'u');
     *os << " " << std::setw(indent) << "";
     if (rep->tag == CONCAT) {
       *os << "CONCAT depth=" << Depth(rep) << "\n";
@@ -1873,7 +1883,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os) {
       } else {
         *os << "FLAT cap=" << TagToLength(rep->tag) << " [";
         if (include_data)
-          *os << absl::CEscape(std::string(rep->data, rep->length));
+          *os << absl::CEscape(absl::string_view(rep->data, rep->length));
         *os << "]\n";
       }
       if (stack.empty()) break;
@@ -1886,19 +1896,19 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os) {
   ABSL_INTERNAL_CHECK(indents.empty(), "");
 }
 
-static std::string ReportError(CordRep* root, CordRep* node) {
+static std::string ReportError(const CordRep* root, const CordRep* node) {
   std::ostringstream buf;
   buf << "Error at node " << node << " in:";
   DumpNode(root, true, &buf);
   return buf.str();
 }
 
-static bool VerifyNode(CordRep* root, CordRep* start_node,
+static bool VerifyNode(const CordRep* root, const CordRep* start_node,
                        bool full_validation) {
-  absl::InlinedVector<CordRep*, 2> worklist;
+  cord_internal::CordTreeConstPath worklist;
   worklist.push_back(start_node);
   do {
-    CordRep* node = worklist.back();
+    const CordRep* node = worklist.back();
     worklist.pop_back();
 
     ABSL_INTERNAL_CHECK(node != nullptr, ReportError(root, node));
@@ -1948,7 +1958,7 @@ static bool VerifyNode(CordRep* root, CordRep* start_node,
   // Iterate over the tree. cur_node is never a leaf node and leaf nodes will
   // never be appended to tree_stack. This reduces overhead from manipulating
   // tree_stack.
-  absl::InlinedVector<const CordRep*, kInlinedVectorSize> tree_stack;
+  cord_internal::CordTreeConstPath tree_stack;
   const CordRep* cur_node = rep;
   while (true) {
     const CordRep* next_node = nullptr;
@@ -1995,6 +2005,9 @@ std::ostream& operator<<(std::ostream& out, const Cord& cord) {
   return out;
 }
 
+template class Cord::GenericChunkIterator<cord_internal::CordTreeMutablePath>;
+template class Cord::GenericChunkIterator<cord_internal::CordTreeDynamicPath>;
+
 namespace strings_internal {
 size_t CordTestAccess::FlatOverhead() { return kFlatOverhead; }
 size_t CordTestAccess::MaxFlatLength() { return kMaxFlatLength; }