author     Abseil Team <absl-team@google.com>       2019-08-07 22:25 -0700
committer  CJ Johnson <johnsoncj@google.com>        2019-08-08 15:25 -0400
commit     8efba58a3b656e9b41fb0471ae6453425a61c520 (patch)
tree       cbf508ad433c030e577afb87b89faba36539549b /absl/container/internal
parent     b49b8d16b67ec6912899684b732e6367f258cfdb (diff)
Export of internal Abseil changes
--
38bc0644e17bf9fe4d78d3db92cd06f585b99ba7 by Andy Soffer <asoffer@google.com>:

Change benchmark to be cc_binary instead of cc_test, and fix a bug in the zipf_distribution benchmark in which arguments were passed in the wrong order.

PiperOrigin-RevId: 262227159

--
3b5411d8f285a758a1713f7ef0dbfa3518f2b38b by CJ Johnson <johnsoncj@google.com>:

Updates the Simple<*>() overload to match the naming scheme of the others

PiperOrigin-RevId: 262211217

--
0cb6812cb8b6e3bf0386b9354189ffcf46c4c094 by Andy Soffer <asoffer@google.com>:

Removing period in trailing namespace comments.

PiperOrigin-RevId: 262210952

--
c903feae3a881be81adf37e9fccd558ee3ed1e64 by CJ Johnson <johnsoncj@google.com>:

Cleans up the public header of InlinedVector to make it more presentable

PiperOrigin-RevId: 262207691

--
9a94384dc79cdcf38f6153894f337ebb744e2d76 by Tom Manshreck <shreck@google.com>:

Fix incorrect doc on operator[]() for flat_hash_set

PiperOrigin-RevId: 262206962

--
17e88ee10b727af82c04f8150b6d246eaac836cb by Derek Mauro <dmauro@google.com>:

Fix gcc-5 build error

PiperOrigin-RevId: 262198236
GitOrigin-RevId: 38bc0644e17bf9fe4d78d3db92cd06f585b99ba7
Change-Id: I77cababa47ba3ee8b6cebb2c2cfc9f60a331f6b7
Diffstat (limited to 'absl/container/internal')
-rw-r--r--  absl/container/internal/inlined_vector.h  61
1 file changed, 19 insertions(+), 42 deletions(-)
diff --git a/absl/container/internal/inlined_vector.h b/absl/container/internal/inlined_vector.h
index b241d0e086de..17e203e51f5e 100644
--- a/absl/container/internal/inlined_vector.h
+++ b/absl/container/internal/inlined_vector.h
@@ -71,14 +71,12 @@ template <typename AllocatorType, typename ValueType, typename ValueAdapter,
           typename SizeType>
 void ConstructElements(AllocatorType* alloc_ptr, ValueType* construct_first,
                        ValueAdapter* values_ptr, SizeType construct_size) {
-  // If any construction fails, all completed constructions are rolled back.
   for (SizeType i = 0; i < construct_size; ++i) {
     ABSL_INTERNAL_TRY {
       values_ptr->ConstructNext(alloc_ptr, construct_first + i);
     }
     ABSL_INTERNAL_CATCH_ANY {
       inlined_vector_internal::DestroyElements(alloc_ptr, construct_first, i);
-
       ABSL_INTERNAL_RETHROW;
     }
   }
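
The hunk above drops the comment noting that ConstructElements rolls back every completed construction if one of them throws; the try/catch structure still encodes that guarantee. A minimal self-contained sketch of the same rollback pattern, using a plain std::allocator_traits loop instead of Abseil's ValueAdapter (names here are hypothetical, not the Abseil code):

#include <cstddef>
#include <memory>

template <typename Alloc, typename T>
void ConstructNCopies(Alloc& alloc, T* first, const T& value, std::size_t n) {
  std::size_t built = 0;
  try {
    for (; built < n; ++built) {
      std::allocator_traits<Alloc>::construct(alloc, first + built, value);
    }
  } catch (...) {
    // Roll back: destroy the elements that were constructed successfully.
    for (std::size_t i = 0; i < built; ++i) {
      std::allocator_traits<Alloc>::destroy(alloc, first + i);
    }
    throw;
  }
}

Either the whole range ends up constructed or none of it remains, which is what lets callers treat the operation as transactional.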
@@ -171,6 +169,12 @@ class AllocationTransaction {
   explicit AllocationTransaction(AllocatorType* alloc_ptr)
       : alloc_data_(*alloc_ptr, nullptr) {}
 
+  ~AllocationTransaction() {
+    if (DidAllocate()) {
+      AllocatorTraits::deallocate(GetAllocator(), GetData(), GetCapacity());
+    }
+  }
+
   AllocationTransaction(const AllocationTransaction&) = delete;
   void operator=(const AllocationTransaction&) = delete;
 
@@ -185,12 +189,6 @@ class AllocationTransaction {
     return GetData();
   }
 
-  ~AllocationTransaction() {
-    if (DidAllocate()) {
-      AllocatorTraits::deallocate(GetAllocator(), GetData(), GetCapacity());
-    }
-  }
-
  private:
   container_internal::CompressedTuple<AllocatorType, pointer> alloc_data_;
   size_type capacity_ = 0;
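
The destructor moved up in this hunk is the heart of AllocationTransaction: an RAII guard that frees its allocation unless the caller takes ownership first. A stripped-down sketch of that idea (hypothetical class and method names, plain std::allocator, not the Abseil implementation):

#include <cstddef>
#include <memory>

template <typename T, typename Alloc = std::allocator<T>>
class AllocationGuard {
 public:
  explicit AllocationGuard(Alloc alloc) : alloc_(alloc) {}

  ~AllocationGuard() {
    // Still owned at scope exit means the operation did not commit: free it.
    if (data_ != nullptr) {
      std::allocator_traits<Alloc>::deallocate(alloc_, data_, capacity_);
    }
  }

  AllocationGuard(const AllocationGuard&) = delete;
  AllocationGuard& operator=(const AllocationGuard&) = delete;

  T* Allocate(std::size_t capacity) {
    data_ = std::allocator_traits<Alloc>::allocate(alloc_, capacity);
    capacity_ = capacity;
    return data_;
  }

  // Caller takes ownership; the destructor becomes a no-op.
  T* Commit() {
    T* result = data_;
    data_ = nullptr;
    capacity_ = 0;
    return result;
  }

 private:
  Alloc alloc_;
  T* data_ = nullptr;
  std::size_t capacity_ = 0;
};

Growth paths can allocate through such a guard, construct into the new memory, and only commit once everything succeeds, so an exception anywhere in between cannot leak the allocation.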
@@ -205,9 +203,21 @@ class ConstructionTransaction {
   explicit ConstructionTransaction(AllocatorType* alloc_ptr)
       : alloc_data_(*alloc_ptr, nullptr) {}
 
+  ~ConstructionTransaction() {
+    if (DidConstruct()) {
+      inlined_vector_internal::DestroyElements(std::addressof(GetAllocator()),
+                                               GetData(), GetSize());
+    }
+  }
+
   ConstructionTransaction(const ConstructionTransaction&) = delete;
   void operator=(const ConstructionTransaction&) = delete;
 
+  AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
+  pointer& GetData() { return alloc_data_.template get<1>(); }
+  size_type& GetSize() { return size_; }
+
+  bool DidConstruct() { return GetData() != nullptr; }
   template <typename ValueAdapter>
   void Construct(pointer data, ValueAdapter* values_ptr, size_type size) {
     inlined_vector_internal::ConstructElements(std::addressof(GetAllocator()),
@@ -220,18 +230,7 @@ class ConstructionTransaction {
     GetSize() = 0;
   }
 
-  ~ConstructionTransaction() {
-    if (GetData() != nullptr) {
-      inlined_vector_internal::DestroyElements(std::addressof(GetAllocator()),
-                                               GetData(), GetSize());
-    }
-  }
-
  private:
-  AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
-  pointer& GetData() { return alloc_data_.template get<1>(); }
-  size_type& GetSize() { return size_; }
-
   container_internal::CompressedTuple<AllocatorType, pointer> alloc_data_;
   size_type size_ = 0;
 };
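
ConstructionTransaction plays the same role for constructed elements: the destructor (now placed next to the constructor) destroys the range it built unless ownership is committed. A simplified sketch of the pattern (hypothetical names, not the Abseil class):

#include <cstddef>
#include <memory>

template <typename T, typename Alloc = std::allocator<T>>
class ConstructionGuard {
 public:
  explicit ConstructionGuard(Alloc alloc) : alloc_(alloc) {}

  ~ConstructionGuard() {
    // Destroy whatever was built unless Commit() transferred ownership.
    for (std::size_t i = 0; i < size_; ++i) {
      std::allocator_traits<Alloc>::destroy(alloc_, data_ + i);
    }
  }

  ConstructionGuard(const ConstructionGuard&) = delete;
  ConstructionGuard& operator=(const ConstructionGuard&) = delete;

  void Construct(T* data, const T& value, std::size_t size) {
    data_ = data;
    for (std::size_t i = 0; i < size; ++i) {
      std::allocator_traits<Alloc>::construct(alloc_, data + i, value);
      ++size_;  // Count as we go so a partially built range is still cleaned up.
    }
  }

  // The caller now owns the constructed elements; the destructor does nothing.
  void Commit() {
    data_ = nullptr;
    size_ = 0;
  }

 private:
  Alloc alloc_;
  T* data_ = nullptr;
  std::size_t size_ = 0;
};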
@@ -345,6 +344,7 @@ class Storage {
 
   void SubtractSize(size_type count) {
     assert(count <= GetSize());
+
     GetSizeAndIsAllocated() -= count << 1;
   }
 
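
The `count << 1` in SubtractSize reflects how the size and the is_allocated flag appear to be packed into a single word, with the flag in the low bit. The sketch below illustrates that packing; the exact layout is an assumption inferred from the shift, not a quote of the Abseil code:

#include <cassert>
#include <cstddef>

struct SizeAndFlag {
  std::size_t bits = 0;  // size << 1 | is_allocated

  std::size_t size() const { return bits >> 1; }
  bool is_allocated() const { return (bits & 1) != 0; }

  void subtract_size(std::size_t count) {
    assert(count <= size());
    bits -= count << 1;  // Subtracting an even value leaves the flag bit untouched.
  }
};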
@@ -533,22 +533,14 @@ auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
   if (new_size > storage_view.capacity) {
     size_type new_capacity = ComputeCapacity(storage_view.capacity, new_size);
     pointer new_data = allocation_tx.Allocate(new_capacity);
-
-    // Construct new objects in `new_data`
     construct_loop = {new_data + storage_view.size,
                       new_size - storage_view.size};
-
-    // Move all existing objects into `new_data`
     move_construct_loop = {new_data, storage_view.size};
-
-    // Destroy all existing objects in `storage_view.data`
     destroy_loop = {storage_view.data, storage_view.size};
   } else if (new_size > storage_view.size) {
-    // Construct new objects in `storage_view.data`
     construct_loop = {storage_view.data + storage_view.size,
                       new_size - storage_view.size};
   } else {
-    // Destroy end `storage_view.size - new_size` objects in `storage_view.data`
     destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
   }
 
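
The comments deleted above described the three branches of Resize: grow past capacity (reallocate, construct the new tail in new storage, move existing elements over, destroy the old ones), grow within capacity (construct the new tail in place), and shrink (destroy the trailing elements). A simplified, self-contained sketch of that branch structure on a plain heap buffer; exception handling, the transactions, and geometric growth are omitted, and everything here is illustrative rather than the Abseil code:

#include <cstddef>
#include <memory>
#include <utility>

template <typename T>
struct Buffer {
  T* data = nullptr;
  std::size_t size = 0;
  std::size_t capacity = 0;
};

template <typename T>
void Resize(Buffer<T>& buf, std::size_t new_size, const T& fill) {
  std::allocator<T> alloc;
  using Traits = std::allocator_traits<std::allocator<T>>;
  if (new_size > buf.capacity) {
    // Grow with reallocation: construct the new tail in fresh storage, move
    // the existing elements across, then destroy and free the old storage.
    T* new_data = Traits::allocate(alloc, new_size);
    for (std::size_t i = buf.size; i < new_size; ++i)
      Traits::construct(alloc, new_data + i, fill);
    for (std::size_t i = 0; i < buf.size; ++i)
      Traits::construct(alloc, new_data + i, std::move(buf.data[i]));
    for (std::size_t i = 0; i < buf.size; ++i)
      Traits::destroy(alloc, buf.data + i);
    if (buf.data != nullptr) Traits::deallocate(alloc, buf.data, buf.capacity);
    buf.data = new_data;
    buf.capacity = new_size;
  } else if (new_size > buf.size) {
    // Enough capacity: construct the new tail in place.
    for (std::size_t i = buf.size; i < new_size; ++i)
      Traits::construct(alloc, buf.data + i, fill);
  } else {
    // Shrinking: destroy the trailing `size - new_size` elements.
    for (std::size_t i = new_size; i < buf.size; ++i)
      Traits::destroy(alloc, buf.data + i);
  }
  buf.size = new_size;
}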
@@ -797,8 +789,6 @@ auto Storage<T, N, A>::ShrinkToFit() -> void {
                                                &move_values, storage_view.size);
   }
   ABSL_INTERNAL_CATCH_ANY {
-    // Writing to inlined data will trample on the existing state, thus it needs
-    // to be restored when a construction fails.
     SetAllocatedData(storage_view.data, storage_view.capacity);
     ABSL_INTERNAL_RETHROW;
   }
@@ -822,13 +812,8 @@ auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
   assert(this != other_storage_ptr);
 
   if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
-    // Both are allocated, thus we can swap the allocations at the top level.
-
     swap(data_.allocated, other_storage_ptr->data_.allocated);
   } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
-    // Both are inlined, thus element-wise swap up to smaller size, then move
-    // the remaining elements.
-
     Storage* small_ptr = this;
     Storage* large_ptr = other_storage_ptr;
     if (small_ptr->GetSize() > large_ptr->GetSize()) swap(small_ptr, large_ptr);
@@ -850,11 +835,6 @@ auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
         large_ptr->GetInlinedData() + small_ptr->GetSize(),
         large_ptr->GetSize() - small_ptr->GetSize());
   } else {
-    // One is allocated and the other is inlined, thus we first move the
-    // elements from the inlined instance to the inlined space in the allocated
-    // instance and then we can finish by having the other vector take on the
-    // allocation.
-
     Storage* allocated_ptr = this;
     Storage* inlined_ptr = other_storage_ptr;
     if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);
@@ -872,8 +852,6 @@ auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
           &move_values, inlined_ptr->GetSize());
     }
     ABSL_INTERNAL_CATCH_ANY {
-      // Writing to inlined data will trample on the existing state, thus it
-      // needs to be restored when a construction fails.
       allocated_ptr->SetAllocatedData(allocated_storage_view.data,
                                       allocated_storage_view.capacity);
       ABSL_INTERNAL_RETHROW;
@@ -887,7 +865,6 @@ auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
                                   allocated_storage_view.capacity);
   }
 
-  // All cases swap the size, `is_allocated` boolean and the allocator.
   swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
   swap(*GetAllocPtr(), *other_storage_ptr->GetAllocPtr());
 }
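
The deleted comments in Swap described its three cases: both vectors allocated (swap the allocations at the top level), both inlined (element-wise swap up to the smaller size, then move the remaining elements), and mixed (move the inlined elements into the allocated side's inlined space, restoring the allocation if a move throws, then hand the allocation to the other vector). The middle case is the only one that touches individual elements; a self-contained sketch of it, as a hypothetical free function rather than the Abseil code:

#include <algorithm>
#include <cstddef>
#include <new>
#include <utility>

template <typename T>
void SwapInlinedBuffers(T* small_data, std::size_t small_size,
                        T* large_data, std::size_t large_size) {
  // Precondition (assumed): small_size <= large_size, and small_data has
  // uninitialized capacity for at least large_size elements.
  std::swap_ranges(small_data, small_data + small_size, large_data);
  for (std::size_t i = small_size; i < large_size; ++i) {
    // Move the larger side's tail into the smaller side, then destroy the
    // moved-from element so the larger side ends up with the smaller size.
    ::new (static_cast<void*>(small_data + i)) T(std::move(large_data[i]));
    large_data[i].~T();
  }
}

As the final lines of the hunk show, every case finishes the same way: by swapping the packed size/is_allocated field and the allocators themselves.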