Diffstat (limited to 'third_party/abseil_cpp/absl/strings/internal/cord_internal.h')
-rw-r--r--  third_party/abseil_cpp/absl/strings/internal/cord_internal.h  186
1 file changed, 153 insertions(+), 33 deletions(-)
diff --git a/third_party/abseil_cpp/absl/strings/internal/cord_internal.h b/third_party/abseil_cpp/absl/strings/internal/cord_internal.h
index 830ceaf473..aa91a691b9 100644
--- a/third_party/abseil_cpp/absl/strings/internal/cord_internal.h
+++ b/third_party/abseil_cpp/absl/strings/internal/cord_internal.h
@@ -21,6 +21,8 @@
 #include <cstdint>
 #include <type_traits>
 
+#include "absl/base/internal/invoke.h"
+#include "absl/container/internal/compressed_tuple.h"
 #include "absl/meta/type_traits.h"
 #include "absl/strings/string_view.h"
 
@@ -31,14 +33,17 @@ namespace cord_internal {
 // Wraps std::atomic for reference counting.
 class Refcount {
  public:
-  Refcount() : count_{1} {}
-  ~Refcount() {}
+  constexpr Refcount() : count_{kRefIncrement} {}
+  struct Immortal {};
+  explicit constexpr Refcount(Immortal) : count_(kImmortalTag) {}
 
-  // Increments the reference count by 1. Imposes no memory ordering.
-  inline void Increment() { count_.fetch_add(1, std::memory_order_relaxed); }
+  // Increments the reference count. Imposes no memory ordering.
+  inline void Increment() {
+    count_.fetch_add(kRefIncrement, std::memory_order_relaxed);
+  }
 
   // Asserts that the current refcount is greater than 0. If the refcount is
-  // greater than 1, decrements the reference count by 1.
+  // greater than 1, decrements the reference count.
   //
   // Returns false if there are no references outstanding; true otherwise.
   // Inserts barriers to ensure that state written before this method returns
@@ -46,19 +51,24 @@ class Refcount {
   // false.
   inline bool Decrement() {
     int32_t refcount = count_.load(std::memory_order_acquire);
-    assert(refcount > 0);
-    return refcount != 1 && count_.fetch_sub(1, std::memory_order_acq_rel) != 1;
+    assert(refcount > 0 || refcount & kImmortalTag);
+    return refcount != kRefIncrement &&
+           count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel) !=
+               kRefIncrement;
   }
 
   // Same as Decrement but expect that refcount is greater than 1.
   inline bool DecrementExpectHighRefcount() {
-    int32_t refcount = count_.fetch_sub(1, std::memory_order_acq_rel);
-    assert(refcount > 0);
-    return refcount != 1;
+    int32_t refcount =
+        count_.fetch_sub(kRefIncrement, std::memory_order_acq_rel);
+    assert(refcount > 0 || refcount & kImmortalTag);
+    return refcount != kRefIncrement;
   }
 
   // Returns the current reference count using acquire semantics.
-  inline int32_t Get() const { return count_.load(std::memory_order_acquire); }
+  inline int32_t Get() const {
+    return count_.load(std::memory_order_acquire) >> kImmortalShift;
+  }
 
   // Returns whether the atomic integer is 1.
   // If the reference count is used in the conventional way, a
@@ -68,9 +78,27 @@ class Refcount {
   // performs the memory barrier needed for the owning thread
   // to act on the object, knowing that it has exclusive access to the
   // object.
-  inline bool IsOne() { return count_.load(std::memory_order_acquire) == 1; }
+  inline bool IsOne() {
+    return count_.load(std::memory_order_acquire) == kRefIncrement;
+  }
+
+  bool IsImmortal() const {
+    return (count_.load(std::memory_order_relaxed) & kImmortalTag) != 0;
+  }
 
  private:
+  // We reserve the bottom bit to tag a reference count as immortal.
+  // By making it `1` we ensure that we never reach `0` when adding/subtracting
+  // `2`, thus it never looks as if it should be destroyed.
+  // These are used for the StringConstant constructor where we do not increase
+  // the refcount at construction time (due to constinit requirements) but we
+  // will still decrease it at destruction time to avoid branching on Unref.
+  enum {
+    kImmortalShift = 1,
+    kRefIncrement = 1 << kImmortalShift,
+    kImmortalTag = kRefIncrement - 1
+  };
+
   std::atomic<int32_t> count_;
 };
 
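Note: the low-bit tag arithmetic above is easiest to check with concrete numbers. The following standalone sketch is illustrative only; the constant names and values are copied from the enum inside Refcount, and everything else is hypothetical rather than part of the header.

#include <cassert>
#include <cstdint>

int main() {
  // Same names and values as the enum in Refcount above.
  constexpr int32_t kImmortalShift = 1;
  constexpr int32_t kRefIncrement = 1 << kImmortalShift;  // 2
  constexpr int32_t kImmortalTag = kRefIncrement - 1;     // 1

  // A regular refcount starts at kRefIncrement and moves in steps of 2,
  // so its low bit stays 0 and Get() is simply count >> kImmortalShift.
  int32_t normal = kRefIncrement;           // one reference
  normal += kRefIncrement;                  // Increment()
  normal -= kRefIncrement;                  // Decrement()
  assert((normal & kImmortalTag) == 0);
  assert((normal >> kImmortalShift) == 1);  // Get() would report 1
  assert(normal == kRefIncrement);          // the next Decrement() releases

  // An immortal refcount starts at kImmortalTag, so its low bit is always
  // set and it can never equal kRefIncrement: Decrement() and
  // DecrementExpectHighRefcount() always report "still referenced".
  int32_t immortal = kImmortalTag;
  immortal -= kRefIncrement;                // even a stray Decrement() is harmless
  assert((immortal & kImmortalTag) != 0);
  assert(immortal != kRefIncrement);
  return 0;
}
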
@@ -83,7 +111,22 @@ struct CordRepConcat;
 struct CordRepSubstring;
 struct CordRepExternal;
 
+// Various representations that we allow
+enum CordRepKind {
+  CONCAT        = 0,
+  EXTERNAL      = 1,
+  SUBSTRING     = 2,
+
+  // We have different tags for different sized flat arrays,
+  // starting with FLAT
+  FLAT          = 3,
+};
+
 struct CordRep {
+  CordRep() = default;
+  constexpr CordRep(Refcount::Immortal immortal, size_t l)
+      : length(l), refcount(immortal), tag(EXTERNAL), data{} {}
+
   // The following three fields have to be less than 32 bytes since
   // that is the smallest supported flat node size.
   size_t length;
@@ -114,35 +157,112 @@ struct CordRepSubstring : public CordRep {
   CordRep* child;
 };
 
-// TODO(strel): replace the following logic (and related functions in cord.cc)
-// with container_internal::Layout.
-
-// Alignment requirement for CordRepExternal so that the type erased releaser
-// will be stored at a suitably aligned address.
-constexpr size_t ExternalRepAlignment() {
-#if defined(__STDCPP_DEFAULT_NEW_ALIGNMENT__)
-  return __STDCPP_DEFAULT_NEW_ALIGNMENT__;
-#else
-  return alignof(max_align_t);
-#endif
-}
-
-// Type for function pointer that will invoke and destroy the type-erased
-// releaser function object. Accepts a pointer to the releaser and the
-// `string_view` that were passed in to `NewExternalRep` below. The return value
-// is the size of the `Releaser` type.
-using ExternalReleaserInvoker = size_t (*)(void*, absl::string_view);
+// Type for function pointer that will invoke the releaser function and also
+// delete the `CordRepExternalImpl` corresponding to the passed in
+// `CordRepExternal`.
+using ExternalReleaserInvoker = void (*)(CordRepExternal*);
 
 // External CordReps are allocated together with a type erased releaser. The
 // releaser is stored in the memory directly following the CordRepExternal.
-struct alignas(ExternalRepAlignment()) CordRepExternal : public CordRep {
+struct CordRepExternal : public CordRep {
+  CordRepExternal() = default;
+  explicit constexpr CordRepExternal(absl::string_view str)
+      : CordRep(Refcount::Immortal{}, str.size()),
+        base(str.data()),
+        releaser_invoker(nullptr) {}
+
   const char* base;
   // Pointer to function that knows how to call and destroy the releaser.
   ExternalReleaserInvoker releaser_invoker;
 };
 
-// TODO(strel): look into removing, it doesn't seem like anything relies on this
-static_assert(sizeof(CordRepConcat) == sizeof(CordRepSubstring), "");
+struct Rank1 {};
+struct Rank0 : Rank1 {};
+
+template <typename Releaser, typename = ::absl::base_internal::invoke_result_t<
+                                 Releaser, absl::string_view>>
+void InvokeReleaser(Rank0, Releaser&& releaser, absl::string_view data) {
+  ::absl::base_internal::invoke(std::forward<Releaser>(releaser), data);
+}
+
+template <typename Releaser,
+          typename = ::absl::base_internal::invoke_result_t<Releaser>>
+void InvokeReleaser(Rank1, Releaser&& releaser, absl::string_view) {
+  ::absl::base_internal::invoke(std::forward<Releaser>(releaser));
+}
+
+// We use CompressedTuple so that we can benefit from EBCO.
+template <typename Releaser>
+struct CordRepExternalImpl
+    : public CordRepExternal,
+      public ::absl::container_internal::CompressedTuple<Releaser> {
+  // The extra int arg is so that we can avoid interfering with copy/move
+  // constructors while still benefitting from perfect forwarding.
+  template <typename T>
+  CordRepExternalImpl(T&& releaser, int)
+      : CordRepExternalImpl::CompressedTuple(std::forward<T>(releaser)) {
+    this->releaser_invoker = &Release;
+  }
+
+  ~CordRepExternalImpl() {
+    InvokeReleaser(Rank0{}, std::move(this->template get<0>()),
+                   absl::string_view(base, length));
+  }
+
+  static void Release(CordRepExternal* rep) {
+    delete static_cast<CordRepExternalImpl*>(rep);
+  }
+};
+
+template <typename Str>
+struct ConstInitExternalStorage {
+  ABSL_CONST_INIT static CordRepExternal value;
+};
+
+template <typename Str>
+CordRepExternal ConstInitExternalStorage<Str>::value(Str::value);
+
+enum {
+  kMaxInline = 15,
+  // Tag byte & kMaxInline means we are storing a pointer.
+  kTreeFlag = 1 << 4,
+  // Tag byte & kProfiledFlag means we are profiling the Cord.
+  kProfiledFlag = 1 << 5
+};
+
+// If the data has length <= kMaxInline, we store it in `as_chars`, and
+// store the size in `tagged_size`.
+// Else we store it in a tree and store a pointer to that tree in
+// `as_tree.rep` and store a tag in `tagged_size`.
+struct AsTree {
+  absl::cord_internal::CordRep* rep;
+  char padding[kMaxInline + 1 - sizeof(absl::cord_internal::CordRep*) - 1];
+  char tagged_size;
+};
+
+constexpr char GetOrNull(absl::string_view data, size_t pos) {
+  return pos < data.size() ? data[pos] : '\0';
+}
+
+union InlineData {
+  constexpr InlineData() : as_chars{} {}
+  explicit constexpr InlineData(AsTree tree) : as_tree(tree) {}
+  explicit constexpr InlineData(absl::string_view chars)
+      : as_chars{GetOrNull(chars, 0),  GetOrNull(chars, 1),
+                 GetOrNull(chars, 2),  GetOrNull(chars, 3),
+                 GetOrNull(chars, 4),  GetOrNull(chars, 5),
+                 GetOrNull(chars, 6),  GetOrNull(chars, 7),
+                 GetOrNull(chars, 8),  GetOrNull(chars, 9),
+                 GetOrNull(chars, 10), GetOrNull(chars, 11),
+                 GetOrNull(chars, 12), GetOrNull(chars, 13),
+                 GetOrNull(chars, 14), static_cast<char>(chars.size())} {}
+
+  AsTree as_tree;
+  char as_chars[kMaxInline + 1];
+};
+static_assert(sizeof(InlineData) == kMaxInline + 1, "");
+static_assert(sizeof(AsTree) == sizeof(InlineData), "");
+static_assert(offsetof(AsTree, tagged_size) == kMaxInline, "");
 
 }  // namespace cord_internal
 ABSL_NAMESPACE_END
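
Note: two pieces of the machinery added in the last hunk are easiest to see in isolation. The sketches below are illustrative only; they mirror the shapes introduced by this patch using standard-library stand-ins (std::string_view, std::invoke, std::invoke_result_t) instead of the absl internals, and every name in them is hypothetical rather than part of the header.

The Rank0/Rank1 pair gives InvokeReleaser a preference order between releasers callable as releaser(string_view) and releasers callable with no arguments: both overloads SFINAE on the invoke result, and a Rank0 argument matches the Rank0 overload exactly but reaches the Rank1 overload only through a derived-to-base conversion, so the data-taking form wins whenever both are viable.

#include <functional>
#include <iostream>
#include <string_view>
#include <type_traits>
#include <utility>

struct Rank1 {};
struct Rank0 : Rank1 {};  // Preferred: an exact match beats derived-to-base.

// Chosen when the releaser accepts the released data.
template <typename Releaser,
          typename = std::invoke_result_t<Releaser, std::string_view>>
void InvokeReleaser(Rank0, Releaser&& releaser, std::string_view data) {
  std::invoke(std::forward<Releaser>(releaser), data);
}

// Fallback for releasers that take no arguments.
template <typename Releaser, typename = std::invoke_result_t<Releaser>>
void InvokeReleaser(Rank1, Releaser&& releaser, std::string_view) {
  std::invoke(std::forward<Releaser>(releaser));
}

int main() {
  InvokeReleaser(Rank0{}, [](std::string_view sv) {   // picks the Rank0 overload
    std::cout << "releasing " << sv.size() << " bytes\n";
  }, "hello");
  InvokeReleaser(Rank0{},                             // falls back to Rank1
                 [] { std::cout << "releasing\n"; }, "hello");
  return 0;
}

The InlineData union overlays a 16-byte character buffer with AsTree: payloads of at most kMaxInline bytes live directly in as_chars with their size in the last byte (tagged_size), while larger payloads store a CordRep* and set kTreeFlag in that same byte. A simplified mirror of the tag-byte convention:

#include <cassert>
#include <cstring>
#include <string>

int main() {
  constexpr int kMaxInline = 15;                 // values as in the enum above
  constexpr unsigned char kTreeFlag = 1 << 4;

  unsigned char rep[kMaxInline + 1] = {};        // same size as InlineData

  // Inline case: data at the front, size in the last byte.
  std::string s = "hello";
  std::memcpy(rep, s.data(), s.size());
  rep[kMaxInline] = static_cast<unsigned char>(s.size());
  assert(rep[kMaxInline] <= kMaxInline);         // inline sizes never reach the flag bits
  assert((rep[kMaxInline] & kTreeFlag) == 0);    // so "is this a tree?" is one bit test

  // Tree case: the tag byte carries kTreeFlag; the leading bytes would hold
  // a CordRep* instead of character data.
  rep[kMaxInline] = kTreeFlag;
  assert((rep[kMaxInline] & kTreeFlag) != 0);
  return 0;
}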