author | Abseil Team <absl-team@google.com> | 2019-07-23T19:42-0700
---|---|---
committer | Andy Getz <durandal@google.com> | 2019-07-23T23:58-0400
commit | ad1485c8986246b2ae9105e512738d0e97aec887 (patch) |
tree | d4e21608f23fe8328fd8058aaa4269be44e8b810 /absl/container |
parent | f3840bc5e33ce4932e35986cf3718450c6f02af2 (diff) |
Export of internal Abseil changes.
-- 1f44f8f487aa3afe8248132e4081519e85671965 by CJ Johnson <johnsoncj@google.com>:

Updates ScopedAllocatorWorks test for InlinedVector to not depend on specific
byte counts of standard library vectors. It's too brittle in the face of
capacity-changing changes to InlinedVector and does not provide signal in
those breakages.

PiperOrigin-RevId: 259590332

-- fef7589547e9cdd04a254f6ae06e2bd9ec2b35f0 by CJ Johnson <johnsoncj@google.com>:

Updates the implementation of InlinedVector::insert(...) to be exception safe
and adds exception safety tests for insert(...)

PiperOrigin-RevId: 259542968
GitOrigin-RevId: 1f44f8f487aa3afe8248132e4081519e85671965
Change-Id: I514beff56159c9c717f8d29197728011af1fecd7
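The `value_type dealias` copies introduced in the new `insert()` and `emplace()` bodies guard against self-insertion: the value being inserted may refer into the vector's own storage, which the insertion is about to rearrange. A minimal sketch of that case (the `main()` harness and the chosen values are illustrative, not part of the change):

```cpp
// Illustrative only: inserting copies of an element of `v` into `v` itself.
// The implementation must copy the value before it starts moving elements.
#include <cassert>

#include "absl/container/inlined_vector.h"

int main() {
  absl::InlinedVector<int, 4> v = {1, 2, 3};
  v.insert(v.begin(), 2, v[0]);  // v[0] aliases the vector's own storage
  assert(v.size() == 5 && v[0] == 1 && v[1] == 1 && v[2] == 1);
  return 0;
}
```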
Diffstat (limited to 'absl/container')
-rw-r--r-- | absl/container/inlined_vector.h | 168
-rw-r--r-- | absl/container/inlined_vector_exception_safety_test.cc | 76
-rw-r--r-- | absl/container/inlined_vector_test.cc | 101
-rw-r--r-- | absl/container/internal/inlined_vector.h | 97
4 files changed, 236 insertions, 206 deletions
diff --git a/absl/container/inlined_vector.h b/absl/container/inlined_vector.h
index 7552723d6a37..84ac67eb5eb5 100644
--- a/absl/container/inlined_vector.h
+++ b/absl/container/inlined_vector.h
@@ -549,15 +549,15 @@ class InlinedVector {
   // of `v` starting at `pos`. Returns an `iterator` pointing to the first of
   // the newly inserted elements.
   iterator insert(const_iterator pos, size_type n, const_reference v) {
-    assert(pos >= begin() && pos <= end());
-    if (ABSL_PREDICT_FALSE(n == 0)) {
+    assert(pos >= begin());
+    assert(pos <= end());
+
+    if (ABSL_PREDICT_TRUE(n != 0)) {
+      value_type dealias = v;
+      return storage_.Insert(pos, CopyValueAdapter(dealias), n);
+    } else {
       return const_cast<iterator>(pos);
     }
-    value_type copy = v;
-    std::pair<iterator, iterator> it_pair = ShiftRight(pos, n);
-    std::fill(it_pair.first, it_pair.second, copy);
-    UninitializedFill(it_pair.second, it_pair.first + n, copy);
-    return it_pair.first;
   }
 
   // Overload of `InlinedVector::insert()` for copying the contents of the
@@ -577,17 +577,15 @@ class InlinedVector {
             EnableIfAtLeastForwardIterator<ForwardIterator>* = nullptr>
   iterator insert(const_iterator pos, ForwardIterator first,
                   ForwardIterator last) {
-    assert(pos >= begin() && pos <= end());
-    if (ABSL_PREDICT_FALSE(first == last)) {
+    assert(pos >= begin());
+    assert(pos <= end());
+
+    if (ABSL_PREDICT_TRUE(first != last)) {
+      return storage_.Insert(pos, IteratorValueAdapter<ForwardIterator>(first),
+                             std::distance(first, last));
+    } else {
       return const_cast<iterator>(pos);
     }
-    auto n = std::distance(first, last);
-    std::pair<iterator, iterator> it_pair = ShiftRight(pos, n);
-    size_type used_spots = it_pair.second - it_pair.first;
-    auto open_spot = std::next(first, used_spots);
-    std::copy(first, open_spot, it_pair.first);
-    UninitializedCopy(open_spot, last, it_pair.second);
-    return it_pair.first;
   }
 
   // Overload of `InlinedVector::insert()` for inserting elements constructed
@@ -615,23 +613,12 @@ class InlinedVector {
   iterator emplace(const_iterator pos, Args&&... args) {
     assert(pos >= begin());
     assert(pos <= end());
-    if (ABSL_PREDICT_FALSE(pos == end())) {
-      emplace_back(std::forward<Args>(args)...);
-      return end() - 1;
-    }
-    T new_t = T(std::forward<Args>(args)...);
-
-    auto range = ShiftRight(pos, 1);
-    if (range.first == range.second) {
-      // constructing into uninitialized memory
-      Construct(range.first, std::move(new_t));
-    } else {
-      // assigning into moved-from object
-      *range.first = T(std::move(new_t));
-    }
-
-    return range.first;
+    value_type dealias(std::forward<Args>(args)...);
+    return storage_.Insert(pos,
+                           IteratorValueAdapter<MoveIterator>(
+                               MoveIterator(std::addressof(dealias))),
+                           1);
   }
 
   // `InlinedVector::emplace_back()`
   //
@@ -746,123 +733,6 @@ class InlinedVector {
   template <typename H, typename TheT, size_t TheN, typename TheA>
   friend H AbslHashValue(H h, const absl::InlinedVector<TheT, TheN, TheA>& a);
 
-  void ResetAllocation(pointer new_data, size_type new_capacity,
-                       size_type new_size) {
-    if (storage_.GetIsAllocated()) {
-      Destroy(storage_.GetAllocatedData(),
-              storage_.GetAllocatedData() + size());
-      assert(begin() == storage_.GetAllocatedData());
-      AllocatorTraits::deallocate(*storage_.GetAllocPtr(),
-                                  storage_.GetAllocatedData(),
-                                  storage_.GetAllocatedCapacity());
-    } else {
-      Destroy(storage_.GetInlinedData(), storage_.GetInlinedData() + size());
-    }
-
-    storage_.SetAllocatedData(new_data, new_capacity);
-    storage_.SetAllocatedSize(new_size);
-  }
-
-  template <typename... Args>
-  reference Construct(pointer p, Args&&... args) {
-    absl::allocator_traits<allocator_type>::construct(
-        *storage_.GetAllocPtr(), p, std::forward<Args>(args)...);
-    return *p;
-  }
-
-  template <typename Iterator>
-  void UninitializedCopy(Iterator src, Iterator src_last, pointer dst) {
-    for (; src != src_last; ++dst, ++src) Construct(dst, *src);
-  }
-
-  template <typename... Args>
-  void UninitializedFill(pointer dst, pointer dst_last, const Args&... args) {
-    for (; dst != dst_last; ++dst) Construct(dst, args...);
-  }
-
-  // Destroy [`from`, `to`) in place.
-  void Destroy(pointer from, pointer to) {
-    for (pointer cur = from; cur != to; ++cur) {
-      absl::allocator_traits<allocator_type>::destroy(*storage_.GetAllocPtr(),
-                                                      cur);
-    }
-#if !defined(NDEBUG)
-    // Overwrite unused memory with `0xab` so we can catch uninitialized usage.
-    // Cast to `void*` to tell the compiler that we don't care that we might be
-    // scribbling on a vtable pointer.
-    if (from != to) {
-      auto len = sizeof(value_type) * std::distance(from, to);
-      std::memset(reinterpret_cast<void*>(from), 0xab, len);
-    }
-#endif  // !defined(NDEBUG)
-  }
-
-  // Shift all elements from `position` to `end()` by `n` places to the right.
-  // If the vector needs to be enlarged, memory will be allocated.
-  // Returns `iterator`s pointing to the start of the previously-initialized
-  // portion and the start of the uninitialized portion of the created gap.
-  // The number of initialized spots is `pair.second - pair.first`. The number
-  // of raw spots is `n - (pair.second - pair.first)`.
-  //
-  // Updates the size of the InlinedVector internally.
-  std::pair<iterator, iterator> ShiftRight(const_iterator position,
-                                           size_type n) {
-    iterator start_used = const_cast<iterator>(position);
-    iterator start_raw = const_cast<iterator>(position);
-    size_type s = size();
-    size_type required_size = s + n;
-
-    if (required_size > capacity()) {
-      // Compute new capacity by repeatedly doubling current capacity
-      size_type new_capacity = capacity();
-      while (new_capacity < required_size) {
-        new_capacity <<= 1;
-      }
-      // Move everyone into the new allocation, leaving a gap of `n` for the
-      // requested shift.
-      pointer new_data =
-          AllocatorTraits::allocate(*storage_.GetAllocPtr(), new_capacity);
-      size_type index = position - begin();
-      UninitializedCopy(std::make_move_iterator(data()),
-                        std::make_move_iterator(data() + index), new_data);
-      UninitializedCopy(std::make_move_iterator(data() + index),
-                        std::make_move_iterator(data() + s),
-                        new_data + index + n);
-      ResetAllocation(new_data, new_capacity, s);
-
-      // New allocation means our iterator is invalid, so we'll recalculate.
-      // Since the entire gap is in new space, there's no used space to reuse.
-      start_raw = begin() + index;
-      start_used = start_raw;
-    } else {
-      // If we had enough space, it's a two-part move. Elements going into
-      // previously-unoccupied space need an `UninitializedCopy()`. Elements
-      // going into a previously-occupied space are just a `std::move()`.
-      iterator pos = const_cast<iterator>(position);
-      iterator raw_space = end();
-      size_type slots_in_used_space = raw_space - pos;
-      size_type new_elements_in_used_space = (std::min)(n, slots_in_used_space);
-      size_type new_elements_in_raw_space = n - new_elements_in_used_space;
-      size_type old_elements_in_used_space =
-          slots_in_used_space - new_elements_in_used_space;
-
-      UninitializedCopy(
-          std::make_move_iterator(pos + old_elements_in_used_space),
-          std::make_move_iterator(raw_space),
-          raw_space + new_elements_in_raw_space);
-      std::move_backward(pos, pos + old_elements_in_used_space, raw_space);
-
-      // If the gap is entirely in raw space, the used space starts where the
-      // raw space starts, leaving no elements in used space. If the gap is
-      // entirely in used space, the raw space starts at the end of the gap,
-      // leaving all elements accounted for within the used space.
-      start_used = pos;
-      start_raw = pos + new_elements_in_used_space;
-    }
-    storage_.AddSize(n);
-    return std::make_pair(start_used, start_raw);
-  }
-
   Storage storage_;
 };
 
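With the `ShiftRight()` and uninitialized-copy helpers removed, the public overloads above all funnel into `storage_.Insert(...)` through value adapters. A short usage sketch of the affected public API (the element values, types, and inline capacity here are arbitrary, chosen only for illustration):

```cpp
// Illustrative usage of the rewritten insert()/emplace() overloads.
#include <string>
#include <vector>

#include "absl/container/inlined_vector.h"

void InsertOverloadsDemo() {
  absl::InlinedVector<std::string, 2> v = {"a", "d"};

  // n copies of a value; may grow from inlined to allocated storage.
  v.insert(v.begin() + 1, 2, "b");

  // A forward-iterator range from another container.
  std::vector<std::string> more = {"c", "c"};
  v.insert(v.begin() + 3, more.begin(), more.end());

  // Construct a single element in place at `pos`: std::string(3, 'e').
  v.emplace(v.end(), 3, 'e');
}
```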
diff --git a/absl/container/inlined_vector_exception_safety_test.cc b/absl/container/inlined_vector_exception_safety_test.cc
index 2d1ce485ae41..ff0da75b7ba7 100644
--- a/absl/container/inlined_vector_exception_safety_test.cc
+++ b/absl/container/inlined_vector_exception_safety_test.cc
@@ -279,6 +279,82 @@ TYPED_TEST(TwoSizeTest, Resize) {
   }));
 }
 
+TYPED_TEST(OneSizeTest, Insert) {
+  using VecT = typename TypeParam::VecT;
+  using value_type = typename VecT::value_type;
+  constexpr static auto from_size = TypeParam::GetSizeAt(0);
+
+  auto tester = testing::MakeExceptionSafetyTester()
+                    .WithInitialValue(VecT{from_size})
+                    .WithContracts(InlinedVectorInvariants<VecT>);
+
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin();
+    vec->insert(it, value_type{});
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin() + (vec->size() / 2);
+    vec->insert(it, value_type{});
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->end();
+    vec->insert(it, value_type{});
+  }));
+}
+
+TYPED_TEST(TwoSizeTest, Insert) {
+  using VecT = typename TypeParam::VecT;
+  using value_type = typename VecT::value_type;
+  constexpr static auto from_size = TypeParam::GetSizeAt(0);
+  constexpr static auto count = TypeParam::GetSizeAt(1);
+
+  auto tester = testing::MakeExceptionSafetyTester()
+                    .WithInitialValue(VecT{from_size})
+                    .WithContracts(InlinedVectorInvariants<VecT>);
+
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin();
+    vec->insert(it, count, value_type{});
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin() + (vec->size() / 2);
+    vec->insert(it, count, value_type{});
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->end();
+    vec->insert(it, count, value_type{});
+  }));
+
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin();
+    vec->insert(it, ABSL_INTERNAL_MAKE_INIT_LIST(value_type, count));
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin() + (vec->size() / 2);
+    vec->insert(it, ABSL_INTERNAL_MAKE_INIT_LIST(value_type, count));
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->end();
+    vec->insert(it, ABSL_INTERNAL_MAKE_INIT_LIST(value_type, count));
+  }));
+
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin();
+    std::array<value_type, count> arr{};
+    vec->insert(it, arr.begin(), arr.end());
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->begin() + (vec->size() / 2);
+    std::array<value_type, count> arr{};
+    vec->insert(it, arr.begin(), arr.end());
+  }));
+  EXPECT_TRUE(tester.Test([](VecT* vec) {
+    auto it = vec->end();
+    std::array<value_type, count> arr{};
+    vec->insert(it, arr.begin(), arr.end());
+  }));
+}
+
 TYPED_TEST(OneSizeTest, EmplaceBack) {
   using VecT = typename TypeParam::VecT;
   constexpr static auto size = TypeParam::GetSizeAt(0);
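These tests drive each `insert()` overload through `testing::MakeExceptionSafetyTester`, which repeatedly runs the operation with throwing element types and re-checks the `InlinedVectorInvariants<VecT>` contract after each induced throw. A hand-rolled sketch of the same idea, assuming a hypothetical `ThrowingInt` type and helper; this is not Abseil's harness, only an illustration of what "exception safe" means for `insert()`:

```cpp
// Sketch only: an element whose copies throw partway through an insert, and
// a check that the vector is still in a valid, usable state afterwards.
#include <stdexcept>

#include "absl/container/inlined_vector.h"

struct ThrowingInt {
  static int copies_before_throw;

  int value = 0;
  ThrowingInt() = default;
  explicit ThrowingInt(int v) : value(v) {}
  ThrowingInt(const ThrowingInt& other) : value(other.value) {
    if (--copies_before_throw < 0) throw std::runtime_error("copy failed");
  }
  ThrowingInt& operator=(const ThrowingInt&) = default;
};
int ThrowingInt::copies_before_throw = 0;

bool InsertLeavesVectorUsable() {
  ThrowingInt::copies_before_throw = 1 << 20;  // allow the setup copies
  absl::InlinedVector<ThrowingInt, 2> v(4, ThrowingInt(1));

  ThrowingInt::copies_before_throw = 2;  // fail partway through the insert
  try {
    v.insert(v.begin() + 2, 3, ThrowingInt(7));
  } catch (const std::runtime_error&) {
    // Basic guarantee: the vector can still be iterated and destroyed.
    int sum = 0;
    for (const ThrowingInt& x : v) sum += x.value;
    return sum >= 0;
  }
  return true;  // not throwing at all is also acceptable
}
```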
diff --git a/absl/container/inlined_vector_test.cc b/absl/container/inlined_vector_test.cc
index 76c470f3471b..bada4fec9ccb 100644
--- a/absl/container/inlined_vector_test.cc
+++ b/absl/container/inlined_vector_test.cc
@@ -1675,66 +1675,53 @@ TEST(AllocatorSupportTest, SwapOneAllocated) {
   EXPECT_THAT(allocated2, 0);
 }
 
-TEST(AllocatorSupportTest, ScopedAllocatorWorks) {
+TEST(AllocatorSupportTest, ScopedAllocatorWorksInlined) {
   using StdVector = std::vector<int, CountingAllocator<int>>;
-  using MyAlloc = std::scoped_allocator_adaptor<CountingAllocator<StdVector>>;
-  using AllocVec = absl::InlinedVector<StdVector, 4, MyAlloc>;
-
-  // MSVC 2017's std::vector allocates different amounts of memory in debug
-  // versus opt mode.
-  int64_t test_allocated = 0;
-  StdVector v(CountingAllocator<int>{&test_allocated});
-  // The amount of memory allocated by a default constructed vector<int>
-  auto default_std_vec_allocated = test_allocated;
-  v.push_back(1);
-  // The amound of memory allocated by a copy-constructed vector<int> with one
-  // element.
-  int64_t one_element_std_vec_copy_allocated = test_allocated;
+  using Alloc = CountingAllocator<StdVector>;
+  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
+  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;
 
-  int64_t allocated = 0;
-  AllocVec vec(MyAlloc{CountingAllocator<StdVector>{&allocated}});
-  EXPECT_EQ(allocated, 0);
+  int64_t total_allocated_byte_count = 0;
 
-  // This default constructs a vector<int>, but the allocator should pass itself
-  // into the vector<int>, so check allocation compared to that.
-  // The absl::InlinedVector does not allocate any memory.
-  // The vector<int> may allocate any memory.
-  auto expected = default_std_vec_allocated;
-  vec.resize(1);
-  EXPECT_EQ(allocated, expected);
-
-  // We make vector<int> allocate memory.
-  // It must go through the allocator even though we didn't construct the
-  // vector directly. This assumes that vec[0] doesn't need to grow its
-  // allocation.
-  expected += sizeof(int);
-  vec[0].push_back(1);
-  EXPECT_EQ(allocated, expected);
-
-  // Another allocating vector.
-  expected += one_element_std_vec_copy_allocated;
-  vec.push_back(vec[0]);
-  EXPECT_EQ(allocated, expected);
-
-  // Overflow the inlined memory.
-  // The absl::InlinedVector will now allocate.
-  expected += sizeof(StdVector) * 8 + default_std_vec_allocated * 3;
-  vec.resize(5);
-  EXPECT_EQ(allocated, expected);
-
-  // Adding one more in external mode should also work.
-  expected += one_element_std_vec_copy_allocated;
-  vec.push_back(vec[0]);
-  EXPECT_EQ(allocated, expected);
-
-  // And extending these should still work. This assumes that vec[0] does not
-  // need to grow its allocation.
-  expected += sizeof(int);
-  vec[0].push_back(1);
-  EXPECT_EQ(allocated, expected);
-
-  vec.clear();
-  EXPECT_EQ(allocated, 0);
+  AllocVec inlined_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));
+
+  // Called only once to remain inlined
+  inlined_case.emplace_back();
+
+  int64_t absl_responsible_for_count = total_allocated_byte_count;
+  EXPECT_EQ(absl_responsible_for_count, 0);
+
+  inlined_case[0].emplace_back();
+  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);
+
+  inlined_case.clear();
+  inlined_case.shrink_to_fit();
+  EXPECT_EQ(total_allocated_byte_count, 0);
+}
+
+TEST(AllocatorSupportTest, ScopedAllocatorWorksAllocated) {
+  using StdVector = std::vector<int, CountingAllocator<int>>;
+  using Alloc = CountingAllocator<StdVector>;
+  using ScopedAlloc = std::scoped_allocator_adaptor<Alloc>;
+  using AllocVec = absl::InlinedVector<StdVector, 1, ScopedAlloc>;
+
+  int64_t total_allocated_byte_count = 0;
+
+  AllocVec allocated_case(ScopedAlloc(Alloc(+&total_allocated_byte_count)));
+
+  // Called twice to force into being allocated
+  allocated_case.emplace_back();
+  allocated_case.emplace_back();
+
+  int64_t absl_responsible_for_count = total_allocated_byte_count;
+  EXPECT_GT(absl_responsible_for_count, 0);
+
+  allocated_case[1].emplace_back();
+  EXPECT_GT(total_allocated_byte_count, absl_responsible_for_count);
+
+  allocated_case.clear();
+  allocated_case.shrink_to_fit();
+  EXPECT_EQ(total_allocated_byte_count, 0);
 }
 
 TEST(AllocatorSupportTest, SizeAllocConstructor) {
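The rewritten allocator tests no longer assert exact byte counts of a standard library vector; they only check whether a `CountingAllocator`, threaded through `std::scoped_allocator_adaptor`, records growth or returns to zero. The adaptor is what makes allocations performed by the nested `std::vector<int>` elements land in the same counter, and the unary `+` in `Alloc(+&total_allocated_byte_count)` forces the constructor argument to parse as an expression rather than a declarator (the "most vexing parse"). A self-contained sketch of the same setup, with plain `std::vector` standing in for `InlinedVector` and a made-up `ByteCountingAllocator` standing in for the test-local `CountingAllocator` helper:

```cpp
// Illustrative only: counting bytes across an outer container and its
// nested vector elements via std::scoped_allocator_adaptor.
#include <cstddef>
#include <cstdint>
#include <memory>
#include <scoped_allocator>
#include <vector>

template <typename T>
struct ByteCountingAllocator {
  using value_type = T;

  int64_t* bytes = nullptr;

  ByteCountingAllocator() = default;
  explicit ByteCountingAllocator(int64_t* b) : bytes(b) {}
  template <typename U>
  ByteCountingAllocator(const ByteCountingAllocator<U>& other)
      : bytes(other.bytes) {}

  T* allocate(std::size_t n) {
    if (bytes != nullptr) *bytes += n * sizeof(T);
    return std::allocator<T>().allocate(n);
  }
  void deallocate(T* p, std::size_t n) {
    if (bytes != nullptr) *bytes -= n * sizeof(T);
    std::allocator<T>().deallocate(p, n);
  }

  template <typename U>
  bool operator==(const ByteCountingAllocator<U>& o) const {
    return bytes == o.bytes;
  }
  template <typename U>
  bool operator!=(const ByteCountingAllocator<U>& o) const {
    return !(*this == o);
  }
};

int64_t CountNestedAllocations() {
  using Inner = std::vector<int, ByteCountingAllocator<int>>;
  using ScopedAlloc =
      std::scoped_allocator_adaptor<ByteCountingAllocator<Inner>>;
  using Outer = std::vector<Inner, ScopedAlloc>;

  int64_t bytes = 0;
  ByteCountingAllocator<Inner> counting(&bytes);
  ScopedAlloc scoped(counting);
  Outer outer(scoped);

  // The adaptor propagates `counting` into each Inner element, so the
  // element's own heap usage is charged to the same counter.
  outer.emplace_back();
  outer[0].push_back(1);
  return bytes;  // > 0: includes the block allocated by the nested vector
}
```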
diff --git a/absl/container/internal/inlined_vector.h b/absl/container/internal/inlined_vector.h
index 0ab2d7daeaf2..7954b2b51889 100644
--- a/absl/container/internal/inlined_vector.h
+++ b/absl/container/internal/inlined_vector.h
@@ -380,6 +380,10 @@ class Storage {
   template <typename ValueAdapter>
   void Resize(ValueAdapter values, size_type new_size);
 
+  template <typename ValueAdapter>
+  iterator Insert(const_iterator pos, ValueAdapter values,
+                  size_type insert_count);
+
   template <typename... Args>
   reference EmplaceBack(Args&&... args);
 
@@ -564,6 +568,99 @@ auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
 }
 
 template <typename T, size_t N, typename A>
+template <typename ValueAdapter>
+auto Storage<T, N, A>::Insert(const_iterator pos, ValueAdapter values,
+                              size_type insert_count) -> iterator {
+  StorageView storage_view = MakeStorageView();
+
+  size_type insert_index =
+      std::distance(const_iterator(storage_view.data), pos);
+  size_type insert_end_index = insert_index + insert_count;
+  size_type new_size = storage_view.size + insert_count;
+
+  if (new_size > storage_view.capacity) {
+    AllocationTransaction allocation_tx(GetAllocPtr());
+    ConstructionTransaction construction_tx(GetAllocPtr());
+    ConstructionTransaction move_construciton_tx(GetAllocPtr());
+
+    IteratorValueAdapter<MoveIterator> move_values(
+        MoveIterator(storage_view.data));
+
+    pointer new_data = allocation_tx.Allocate(
+        LegacyNextCapacityFrom(storage_view.capacity, new_size));
+
+    construction_tx.Construct(new_data + insert_index, &values, insert_count);
+
+    move_construciton_tx.Construct(new_data, &move_values, insert_index);
+
+    inlined_vector_internal::ConstructElements(
+        GetAllocPtr(), new_data + insert_end_index, &move_values,
+        storage_view.size - insert_index);
+
+    inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
+                                             storage_view.size);
+
+    construction_tx.Commit();
+    move_construciton_tx.Commit();
+    DeallocateIfAllocated();
+    AcquireAllocation(&allocation_tx);
+
+    SetAllocatedSize(new_size);
+    return iterator(new_data + insert_index);
+  } else {
+    size_type move_construction_destination_index =
+        (std::max)(insert_end_index, storage_view.size);
+
+    ConstructionTransaction move_construction_tx(GetAllocPtr());
+
+    IteratorValueAdapter<MoveIterator> move_construction_values(
+        MoveIterator(storage_view.data +
+                     (move_construction_destination_index - insert_count)));
+    absl::Span<value_type> move_construction = {
+        storage_view.data + move_construction_destination_index,
+        new_size - move_construction_destination_index};
+
+    pointer move_assignment_values = storage_view.data + insert_index;
+    absl::Span<value_type> move_assignment = {
+        storage_view.data + insert_end_index,
+        move_construction_destination_index - insert_end_index};
+
+    absl::Span<value_type> insert_assignment = {move_assignment_values,
+                                                move_construction.size()};
+
+    absl::Span<value_type> insert_construction = {
+        insert_assignment.data() + insert_assignment.size(),
+        insert_count - insert_assignment.size()};
+
+    move_construction_tx.Construct(move_construction.data(),
+                                   &move_construction_values,
+                                   move_construction.size());
+
+    for (pointer destination = move_assignment.data() + move_assignment.size(),
+                 last_destination = move_assignment.data(),
+                 source = move_assignment_values + move_assignment.size();
+         ;) {
+      --destination;
+      --source;
+      if (destination < last_destination) break;
+      *destination = std::move(*source);
+    }
+
+    inlined_vector_internal::AssignElements(insert_assignment.data(), &values,
+                                            insert_assignment.size());
+
+    inlined_vector_internal::ConstructElements(
+        GetAllocPtr(), insert_construction.data(), &values,
+        insert_construction.size());
+
+    move_construction_tx.Commit();
+
+    AddSize(insert_count);
+    return iterator(storage_view.data + insert_index);
+  }
+}
+
+template <typename T, size_t N, typename A>
 template <typename... Args>
 auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> reference {
   StorageView storage_view = MakeStorageView();
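`Storage::Insert()` has two paths. When the new size exceeds capacity, it constructs the inserted values and the moved prefix into a fresh allocation under `AllocationTransaction`/`ConstructionTransaction` guards and commits only after the remaining constructions succeed, so a throw rolls everything back and leaves the old data untouched. Otherwise it move-constructs the old tail into uninitialized capacity, move-assigns the remaining shifted elements backward, and then assigns or constructs the inserted values in place; the real code sizes `insert_assignment` by `move_construction.size()`, and both work out to `min(count, size - index)`. The sketch below mirrors the span arithmetic of the in-place branch; it is illustrative only, not an Abseil API, and the struct and function names are made up:

```cpp
// Illustrative arithmetic: region sizes for an in-place insert of `count`
// values at `index` into a vector of `size` initialized elements that
// already has enough capacity.
#include <algorithm>
#include <cassert>
#include <cstddef>

struct InsertRegions {
  std::size_t move_constructed;  // old tail moved into uninitialized capacity
  std::size_t move_assigned;     // old tail shifted within initialized slots
  std::size_t assigned;          // new values copy-assigned over old slots
  std::size_t constructed;       // new values constructed in raw capacity
};

InsertRegions PlanInPlaceInsert(std::size_t size, std::size_t index,
                                std::size_t count) {
  assert(index <= size);
  const std::size_t insert_end = index + count;
  const std::size_t new_size = size + count;
  // First destination slot that currently holds no constructed element.
  const std::size_t move_construction_destination = std::max(insert_end, size);

  InsertRegions r;
  r.move_constructed = new_size - move_construction_destination;
  r.move_assigned = move_construction_destination - insert_end;
  r.assigned = std::min(count, size - index);
  r.constructed = count - r.assigned;
  return r;
}

void PlanInPlaceInsertExamples() {
  // Gap falls inside the initialized prefix: two old elements are
  // move-constructed past the end, one is move-assigned backward, and both
  // inserted values overwrite moved-from slots.
  InsertRegions a = PlanInPlaceInsert(/*size=*/4, /*index=*/1, /*count=*/2);
  assert(a.move_constructed == 2 && a.move_assigned == 1);
  assert(a.assigned == 2 && a.constructed == 0);

  // Gap extends past the old end: part of the inserted range must be
  // constructed directly into raw capacity.
  InsertRegions b = PlanInPlaceInsert(/*size=*/4, /*index=*/3, /*count=*/4);
  assert(b.move_constructed == 1 && b.move_assigned == 0);
  assert(b.assigned == 1 && b.constructed == 3);
}
```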