author    Abseil Team <absl-team@google.com>   2019-04-04 15:13 -0700
committer Gennadiy Rozental <rogeeff@google.com>   2019-04-04 20:07 -0400
commit    666fc1266bccfd8e6eaaa084e7b42580bb8eb199 (patch)
tree      9a9366d8846fb3905f3f224d0e90b9de9d5f730b /absl/container/internal
parent    93dfcf74cb5fccae3da07897d8613ae6cab958a0 (diff)
Export of internal Abseil changes.
--
bc89d3221e3927d08881d75eeee0e8db862300fa by Benjamin Barenblat <bbaren@google.com>:

Clean up C-style casts in `ABSL_ASSERT`

PiperOrigin-RevId: 241932756
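
A hedged illustration of the kind of cleanup described (generic code, not the actual ABSL_ASSERT macro body): a C-style cast used to discard a value is replaced by the equivalent named cast.

  // Hypothetical helper, for illustration only.
  inline void IgnoreResult(int some_expression) {
    // Before: (void)some_expression;      // C-style cast
    static_cast<void>(some_expression);    // named cast, same effect
  }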

--
17482daae4b3e2fc725b759586590ac466b72a1e by Jon Cohen <cohenjon@google.com>:

Move Gtest-specific CMake code to its own directory

PiperOrigin-RevId: 241920192

--
9ae52b4f665625352c0a789cff884bde492c28f5 by CJ Johnson <johnsoncj@google.com>:

Moves private data methods from InlinedVector to the InlinedVector Storage class in anticipation of migrating the Rep union type

PiperOrigin-RevId: 241794144
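
A rough, hedged sketch (assumed shape, not code from this commit) of how the container side is expected to call the new Storage accessors once the migration completes:

  // Hypothetical InlinedVector excerpt; the accessor names mirror the diff
  // below, but this caller, the member name, and the namespace are assumptions.
  template <typename T, size_t N, typename A>
  class InlinedVector {
   public:
    size_t size() const { return storage_.GetSize(); }
    bool empty() const { return storage_.GetSize() == 0; }

   private:
    inlined_vector_internal::Storage<InlinedVector<T, N, A>> storage_;
  };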

--
95315bc50a61a0aae4f171b44c2312158a43e72e by Jon Cohen <cohenjon@google.com>:

Use /DNOMINMAX in Abseil tests. This offsets includes of <windows.h> from gtest.

PiperOrigin-RevId: 241790584
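
For context, a minimal Windows-only sketch (not part of the commit) of the clash that NOMINMAX avoids: without it, <windows.h> defines function-like min/max macros, and any later use of std::max fails to preprocess.

  // /DNOMINMAX on the compiler command line has the same effect as this #define.
  #define NOMINMAX
  #include <windows.h>   // Windows-only; pulled in by way of gtest in practice
  #include <algorithm>

  int LargerOf(int a, int b) {
    return std::max(a, b);  // would not compile if max were still a macro
  }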

--
ee505c7f2ab99d29c165ea21a07190474f64053d by CJ Johnson <johnsoncj@google.com>:

Adds inlined_vector_internal to the deps of inlined_vector in CMakeLists.txt

PiperOrigin-RevId: 241775332

--
94eb5165b49bab59ce7de143be38a4581d5658da by CJ Johnson <johnsoncj@google.com>:

Migrates InlinedVector Storage to class Metadata for compatibility with the eventual member-wise migration to the new exception-safe implementation

PiperOrigin-RevId: 241633420
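
A hedged, simplified sketch of the idea (names are illustrative; the real member is the CompressedTuple shown in the diff below): CompressedTuple lets an empty allocator share storage with the packed size field through the empty-base optimization, replacing the hand-written AllocatorAndTag.

  #include <memory>
  #include "absl/container/internal/compressed_tuple.h"

  template <typename T>
  struct MetadataSketch {
    // Slot 0: the allocator (zero size when std::allocator<T> is empty).
    // Slot 1: a size_t packing `size << 1 | is_allocated`, as in the diff.
    absl::container_internal::CompressedTuple<std::allocator<T>, size_t> data{
        std::allocator<T>(), 0};

    size_t size() const { return data.template get<1>() >> 1; }
    bool allocated() const { return data.template get<1>() & 1; }
  };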

--
f99e172caad1ec8b35bf7bbabaf2833d55a6f055 by Abseil Team <absl-team@google.com>:

Add MSVC-specific linker flags only to MSVC builds.

PiperOrigin-RevId: 241615711

--
3ad19d2779281e945bdf56643dc5cee3f730eb4f by Abseil Team <absl-team@google.com>:

Add a comment about per-process randomization of absl::Hash.

PiperOrigin-RevId: 241583697
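
For reference, a small hedged example (not from the commit) of what that comment warns about: absl::Hash values are randomized per process, so they are only meaningful within a single run and must not be persisted or compared across processes.

  #include <cstddef>
  #include <cstdio>
  #include <string>
  #include "absl/hash/hash.h"

  int main() {
    // The printed value will generally differ from one run to the next.
    const size_t h = absl::Hash<std::string>{}(std::string("hello, world"));
    std::printf("%zu\n", h);
  }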

--
8dfb02d725fee3528351b2da4ed32a7455f9858a by Tom Manshreck <shreck@google.com>:

Internal change

PiperOrigin-RevId: 241564734
GitOrigin-RevId: bc89d3221e3927d08881d75eeee0e8db862300fa
Change-Id: Ibad3da416d08a96ec1f8313f8b519b4270b7e01a
Diffstat (limited to 'absl/container/internal')
-rw-r--r--  absl/container/internal/inlined_vector.h  90
1 file changed, 63 insertions(+), 27 deletions(-)
diff --git a/absl/container/internal/inlined_vector.h b/absl/container/internal/inlined_vector.h
index 24059d94c876..7aa05b6a9974 100644
--- a/absl/container/internal/inlined_vector.h
+++ b/absl/container/internal/inlined_vector.h
@@ -18,7 +18,9 @@
 #include <cstddef>
 #include <iterator>
 #include <memory>
+#include <utility>
 
+#include "absl/container/internal/compressed_tuple.h"
 #include "absl/meta/type_traits.h"
 
 namespace absl {
@@ -31,6 +33,8 @@ template <template <typename, size_t, typename> class InlinedVector, typename T,
           size_t N, typename A>
 class Storage<InlinedVector<T, N, A>> {
  public:
+  class Allocation;  // TODO(johnsoncj): Remove after migration
+
   using allocator_type = A;
   using value_type = typename allocator_type::value_type;
   using pointer = typename allocator_type::pointer;
@@ -45,38 +49,63 @@ class Storage<InlinedVector<T, N, A>> {
   using reverse_iterator = std::reverse_iterator<iterator>;
   using const_reverse_iterator = std::reverse_iterator<const_iterator>;
 
-  explicit Storage(const allocator_type& a) : allocator_and_tag_(a) {}
+  explicit Storage(const allocator_type& alloc)
+      : metadata_(alloc, /* empty and inlined */ 0) {}
 
-  // TODO(johnsoncj): Make the below types and members private after migration
+  size_type GetSize() const { return GetSizeAndIsAllocated() >> 1; }
 
-  // Holds whether the vector is allocated or not in the lowest bit and the size
-  // in the high bits:
-  //   `size_ = (size << 1) | is_allocated;`
-  class Tag {
-    size_type size_;
+  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }
 
-   public:
-    Tag() : size_(0) {}
-    size_type size() const { return size_ / 2; }
-    void add_size(size_type n) { size_ += n * 2; }
-    void set_inline_size(size_type n) { size_ = n * 2; }
-    void set_allocated_size(size_type n) { size_ = (n * 2) + 1; }
-    bool allocated() const { return size_ % 2; }
-  };
+  Allocation& GetAllocation() {
+    return reinterpret_cast<Allocation&>(rep_.allocation_storage.allocation);
+  }
 
-  // Derives from `allocator_type` to use the empty base class optimization.
-  // If the `allocator_type` is stateless, we can store our instance for free.
-  class AllocatorAndTag : private allocator_type {
-    Tag tag_;
+  const Allocation& GetAllocation() const {
+    return reinterpret_cast<const Allocation&>(
+        rep_.allocation_storage.allocation);
+  }
 
-   public:
-    explicit AllocatorAndTag(const allocator_type& a) : allocator_type(a) {}
-    Tag& tag() { return tag_; }
-    const Tag& tag() const { return tag_; }
-    allocator_type& allocator() { return *this; }
-    const allocator_type& allocator() const { return *this; }
-  };
+  pointer GetInlinedData() {
+    return reinterpret_cast<pointer>(
+        std::addressof(rep_.inlined_storage.inlined[0]));
+  }
+
+  const_pointer GetInlinedData() const {
+    return reinterpret_cast<const_pointer>(
+        std::addressof(rep_.inlined_storage.inlined[0]));
+  }
+
+  pointer GetAllocatedData() { return GetAllocation().buffer(); }
+
+  const_pointer GetAllocatedData() const { return GetAllocation().buffer(); }
+
+  size_type GetAllocatedCapacity() const { return GetAllocation().capacity(); }
 
+  allocator_type& GetAllocator() { return metadata_.template get<0>(); }
+
+  const allocator_type& GetAllocator() const {
+    return metadata_.template get<0>();
+  }
+
+  void SetAllocatedSize(size_type size) {
+    GetSizeAndIsAllocated() = (size << 1) | static_cast<size_type>(1);
+  }
+
+  void SetInlinedSize(size_type size) { GetSizeAndIsAllocated() = size << 1; }
+
+  void AddSize(size_type count) { GetSizeAndIsAllocated() += count << 1; }
+
+  void InitAllocation(const Allocation& allocation) {
+    new (static_cast<void*>(std::addressof(rep_.allocation_storage.allocation)))
+        Allocation(allocation);
+  }
+
+  void SwapSizeAndIsAllocated(Storage& other) {
+    using std::swap;
+    swap(GetSizeAndIsAllocated(), other.GetSizeAndIsAllocated());
+  }
+
+  // TODO(johnsoncj): Make the below types private after migration
   class Allocation {
     size_type capacity_;
     pointer buffer_;
@@ -95,6 +124,13 @@ class Storage<InlinedVector<T, N, A>> {
     }
   };
 
+ private:
+  size_type& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }
+
+  const size_type& GetSizeAndIsAllocated() const {
+    return metadata_.template get<1>();
+  }
+
   // Stores either the inlined or allocated representation
   union Rep {
     using ValueTypeBuffer =
@@ -116,7 +152,7 @@ class Storage<InlinedVector<T, N, A>> {
     AllocatedRep allocation_storage;
   };
 
-  AllocatorAndTag allocator_and_tag_;
+  container_internal::CompressedTuple<allocator_type, size_type> metadata_;
   Rep rep_;
 };
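
A worked example (illustrative names only) of the size/allocation encoding that the new accessors above preserve: the low bit of the packed field stores is_allocated and the remaining high bits store the size, exactly as SetAllocatedSize, SetInlinedSize, and GetSize compute it.

  #include <cstddef>

  constexpr size_t Pack(size_t size, bool is_allocated) {
    return (size << 1) | static_cast<size_t>(is_allocated);
  }
  static_assert(Pack(5, true) >> 1 == 5, "size lives in the high bits");
  static_assert((Pack(5, true) & 1) == 1, "allocation flag is the low bit");
  static_assert((Pack(5, false) & 1) == 0, "inlined storage keeps the bit clear");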