http://git-wip-us.apache.org/repos/asf/marmotta/blob/0eb556da/libraries/ostrich/backend/3rdparty/abseil/absl/container/inlined_vector_test.cc
----------------------------------------------------------------------
diff --git a/libraries/ostrich/backend/3rdparty/abseil/absl/container/inlined_vector_test.cc b/libraries/ostrich/backend/3rdparty/abseil/absl/container/inlined_vector_test.cc
new file mode 100644
index 0000000..26a7d5b
--- /dev/null
+++ b/libraries/ostrich/backend/3rdparty/abseil/absl/container/inlined_vector_test.cc
@@ -0,0 +1,1766 @@
+// Copyright 2017 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/container/inlined_vector.h"
+
+#include <algorithm>
+#include <forward_list>
+#include <iterator>  // for std::istream_iterator, used in the Insert test
+#include <list>
+#include <memory>
+#include <scoped_allocator>
+#include <sstream>
+#include <stdexcept>
+#include <string>
+#include <vector>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "absl/base/attributes.h"
+#include "absl/base/internal/exception_testing.h"
+#include "absl/base/internal/raw_logging.h"
+#include "absl/base/macros.h"
+#include "absl/container/internal/test_instance_tracker.h"
+#include "absl/memory/memory.h"
+#include "absl/strings/str_cat.h"
+
+namespace {
+
+using absl::test_internal::CopyableMovableInstance;
+using absl::test_internal::CopyableOnlyInstance;
+using absl::test_internal::InstanceTracker;
+using testing::AllOf;
+using testing::Each;
+using testing::ElementsAre;
+using testing::ElementsAreArray;
+using testing::Eq;
+using testing::Gt;
+using testing::PrintToString;
+
+using IntVec = absl::InlinedVector<int, 8>;
+
+MATCHER_P(SizeIs, n, "") {
+  return testing::ExplainMatchResult(n, arg.size(), result_listener);
+}
+
+MATCHER_P(CapacityIs, n, "") {
+  return testing::ExplainMatchResult(n, arg.capacity(), result_listener);
+}
+
+MATCHER_P(ValueIs, e, "") {
+  return testing::ExplainMatchResult(e, arg.value(), result_listener);
+}
+
+// TODO(bsamwel): Add support for movable-only types.
+
+// Test fixture for typed tests on BaseCountedInstance derived classes, see
+// test_instance_tracker.h.
+template <typename T>
+class InstanceTest : public ::testing::Test {};
+TYPED_TEST_CASE_P(InstanceTest);
+
+// A simple reference counted class to make sure that the proper elements are
+// destroyed in the erase(begin, end) test.
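// For orientation (editor's illustration, not part of the upstream file):
// every live RefCounted value holds one reference on the external counter it
// was constructed with, so
//   int count = 0;
//   {
//     RefCounted a(7, &count);  // count == 1
//     RefCounted b = a;         // copy: count == 2
//   }                           // both destroyed: count == 0
// The erase(begin, end) test below asserts exactly this invariant per element.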
+class RefCounted { + public: + RefCounted(int value, int* count) : value_(value), count_(count) { + Ref(); + } + + RefCounted(const RefCounted& v) + : value_(v.value_), count_(v.count_) { + Ref(); + } + + ~RefCounted() { + Unref(); + count_ = nullptr; + } + + friend void swap(RefCounted& a, RefCounted& b) { + using std::swap; + swap(a.value_, b.value_); + swap(a.count_, b.count_); + } + + RefCounted& operator=(RefCounted v) { + using std::swap; + swap(*this, v); + return *this; + } + + void Ref() const { + ABSL_RAW_CHECK(count_ != nullptr, ""); + ++(*count_); + } + + void Unref() const { + --(*count_); + ABSL_RAW_CHECK(*count_ >= 0, ""); + } + + int value_; + int* count_; +}; + +using RefCountedVec = absl::InlinedVector<RefCounted, 8>; + +// A class with a vtable pointer +class Dynamic { + public: + virtual ~Dynamic() {} +}; + +using DynamicVec = absl::InlinedVector<Dynamic, 8>; + +// Append 0..len-1 to *v +template <typename Container> +static void Fill(Container* v, int len, int offset = 0) { + for (int i = 0; i < len; i++) { + v->push_back(i + offset); + } +} + +static IntVec Fill(int len, int offset = 0) { + IntVec v; + Fill(&v, len, offset); + return v; +} + +// This is a stateful allocator, but the state lives outside of the +// allocator (in whatever test is using the allocator). This is odd +// but helps in tests where the allocator is propagated into nested +// containers - that chain of allocators uses the same state and is +// thus easier to query for aggregate allocation information. +template <typename T> +class CountingAllocator : public std::allocator<T> { + public: + using Alloc = std::allocator<T>; + using pointer = typename Alloc::pointer; + using size_type = typename Alloc::size_type; + + CountingAllocator() : bytes_used_(nullptr) {} + explicit CountingAllocator(int64_t* b) : bytes_used_(b) {} + + template <typename U> + CountingAllocator(const CountingAllocator<U>& x) + : Alloc(x), bytes_used_(x.bytes_used_) {} + + pointer allocate(size_type n, + std::allocator<void>::const_pointer hint = nullptr) { + assert(bytes_used_ != nullptr); + *bytes_used_ += n * sizeof(T); + return Alloc::allocate(n, hint); + } + + void deallocate(pointer p, size_type n) { + Alloc::deallocate(p, n); + assert(bytes_used_ != nullptr); + *bytes_used_ -= n * sizeof(T); + } + + template<typename U> + class rebind { + public: + using other = CountingAllocator<U>; + }; + + friend bool operator==(const CountingAllocator& a, + const CountingAllocator& b) { + return a.bytes_used_ == b.bytes_used_; + } + + friend bool operator!=(const CountingAllocator& a, + const CountingAllocator& b) { + return !(a == b); + } + + int64_t* bytes_used_; +}; + +TEST(IntVec, SimpleOps) { + for (int len = 0; len < 20; len++) { + IntVec v; + const IntVec& cv = v; // const alias + + Fill(&v, len); + EXPECT_EQ(len, v.size()); + EXPECT_LE(len, v.capacity()); + + for (int i = 0; i < len; i++) { + EXPECT_EQ(i, v[i]); + EXPECT_EQ(i, v.at(i)); + } + EXPECT_EQ(v.begin(), v.data()); + EXPECT_EQ(cv.begin(), cv.data()); + + int counter = 0; + for (IntVec::iterator iter = v.begin(); iter != v.end(); ++iter) { + EXPECT_EQ(counter, *iter); + counter++; + } + EXPECT_EQ(counter, len); + + counter = 0; + for (IntVec::const_iterator iter = v.begin(); iter != v.end(); ++iter) { + EXPECT_EQ(counter, *iter); + counter++; + } + EXPECT_EQ(counter, len); + + counter = 0; + for (IntVec::const_iterator iter = v.cbegin(); iter != v.cend(); ++iter) { + EXPECT_EQ(counter, *iter); + counter++; + } + EXPECT_EQ(counter, len); + + if (len > 0) { + 
+      EXPECT_EQ(0, v.front());
+      EXPECT_EQ(len - 1, v.back());
+      v.pop_back();
+      EXPECT_EQ(len - 1, v.size());
+      for (int i = 0; i < v.size(); ++i) {
+        EXPECT_EQ(i, v[i]);
+        EXPECT_EQ(i, v.at(i));
+      }
+    }
+  }
+}
+
+TEST(IntVec, AtThrows) {
+  IntVec v = {1, 2, 3};
+  EXPECT_EQ(v.at(2), 3);
+  ABSL_BASE_INTERNAL_EXPECT_FAIL(v.at(3), std::out_of_range,
+                                 "failed bounds check");
+}
+
+TEST(IntVec, ReverseIterator) {
+  for (int len = 0; len < 20; len++) {
+    IntVec v;
+    Fill(&v, len);
+
+    int counter = len;
+    for (IntVec::reverse_iterator iter = v.rbegin(); iter != v.rend();
+         ++iter) {
+      counter--;
+      EXPECT_EQ(counter, *iter);
+    }
+    EXPECT_EQ(counter, 0);
+
+    counter = len;
+    for (IntVec::const_reverse_iterator iter = v.rbegin(); iter != v.rend();
+         ++iter) {
+      counter--;
+      EXPECT_EQ(counter, *iter);
+    }
+    EXPECT_EQ(counter, 0);
+
+    counter = len;
+    for (IntVec::const_reverse_iterator iter = v.crbegin(); iter != v.crend();
+         ++iter) {
+      counter--;
+      EXPECT_EQ(counter, *iter);
+    }
+    EXPECT_EQ(counter, 0);
+  }
+}
+
+TEST(IntVec, Erase) {
+  for (int len = 1; len < 20; len++) {
+    for (int i = 0; i < len; ++i) {
+      IntVec v;
+      Fill(&v, len);
+      v.erase(v.begin() + i);
+      EXPECT_EQ(len - 1, v.size());
+      for (int j = 0; j < i; ++j) {
+        EXPECT_EQ(j, v[j]);
+      }
+      for (int j = i; j < len - 1; ++j) {
+        EXPECT_EQ(j + 1, v[j]);
+      }
+    }
+  }
+}
+
+// At the end of this test loop, the elements between [erase_begin, erase_end)
+// should have reference counts == 0, and all other elements should have
+// reference counts == 1.
+TEST(RefCountedVec, EraseBeginEnd) {
+  for (int len = 1; len < 20; ++len) {
+    for (int erase_begin = 0; erase_begin < len; ++erase_begin) {
+      for (int erase_end = erase_begin; erase_end <= len; ++erase_end) {
+        std::vector<int> counts(len, 0);
+        RefCountedVec v;
+        for (int i = 0; i < len; ++i) {
+          v.push_back(RefCounted(i, &counts[i]));
+        }
+
+        int erase_len = erase_end - erase_begin;
+
+        v.erase(v.begin() + erase_begin, v.begin() + erase_end);
+
+        EXPECT_EQ(len - erase_len, v.size());
+
+        // Check the elements before the first element erased.
+        for (int i = 0; i < erase_begin; ++i) {
+          EXPECT_EQ(i, v[i].value_);
+        }
+
+        // Check the elements after the first element erased.
+        for (int i = erase_begin; i < v.size(); ++i) {
+          EXPECT_EQ(i + erase_len, v[i].value_);
+        }
+
+        // Check that the elements at the beginning are preserved.
+        for (int i = 0; i < erase_begin; ++i) {
+          EXPECT_EQ(1, counts[i]);
+        }
+
+        // Check that the erased elements are destroyed.
+        for (int i = erase_begin; i < erase_end; ++i) {
+          EXPECT_EQ(0, counts[i]);
+        }
+
+        // Check that the elements at the end are preserved.
+        for (int i = erase_end; i < len; ++i) {
+          EXPECT_EQ(1, counts[i]);
+        }
+      }
+    }
+  }
+}
+
+struct NoDefaultCtor {
+  explicit NoDefaultCtor(int) {}
+};
+struct NoCopy {
+  NoCopy() {}
+  NoCopy(const NoCopy&) = delete;
+};
+struct NoAssign {
+  NoAssign() {}
+  NoAssign& operator=(const NoAssign&) = delete;
+};
+struct MoveOnly {
+  MoveOnly() {}
+  MoveOnly(MoveOnly&&) = default;
+  MoveOnly& operator=(MoveOnly&&) = default;
+};
+TEST(InlinedVectorTest, NoDefaultCtor) {
+  absl::InlinedVector<NoDefaultCtor, 1> v(10, NoDefaultCtor(2));
+  (void)v;
+}
+TEST(InlinedVectorTest, NoCopy) {
+  absl::InlinedVector<NoCopy, 1> v(10);
+  (void)v;
+}
+TEST(InlinedVectorTest, NoAssign) {
+  absl::InlinedVector<NoAssign, 1> v(10);
+  (void)v;
+}
+TEST(InlinedVectorTest, MoveOnly) {
+  absl::InlinedVector<MoveOnly, 2> v;
+  v.push_back(MoveOnly{});
+  v.push_back(MoveOnly{});
+  v.push_back(MoveOnly{});
+  v.erase(v.begin());
+  v.push_back(MoveOnly{});
+  v.erase(v.begin(), v.begin() + 1);
+  v.insert(v.begin(), MoveOnly{});
+  v.emplace(v.begin());
+  v.emplace(v.begin(), MoveOnly{});
+}
+TEST(InlinedVectorTest, Noexcept) {
+  EXPECT_TRUE(std::is_nothrow_move_constructible<IntVec>::value);
+  EXPECT_TRUE((std::is_nothrow_move_constructible<
+               absl::InlinedVector<MoveOnly, 2>>::value));
+
+  struct MoveCanThrow {
+    MoveCanThrow(MoveCanThrow&&) {}
+  };
+  EXPECT_EQ(absl::default_allocator_is_nothrow::value,
+            (std::is_nothrow_move_constructible<
+                absl::InlinedVector<MoveCanThrow, 2>>::value));
+}
+
+TEST(InlinedVectorTest, EmplaceBack) {
+  absl::InlinedVector<std::pair<std::string, int>, 1> v;
+
+  auto& inlined_element = v.emplace_back("answer", 42);
+  EXPECT_EQ(&inlined_element, &v[0]);
+  EXPECT_EQ(inlined_element.first, "answer");
+  EXPECT_EQ(inlined_element.second, 42);
+
+  auto& allocated_element = v.emplace_back("taxicab", 1729);
+  EXPECT_EQ(&allocated_element, &v[1]);
+  EXPECT_EQ(allocated_element.first, "taxicab");
+  EXPECT_EQ(allocated_element.second, 1729);
+}
+
+TEST(InlinedVectorTest, ShrinkToFitGrowingVector) {
+  absl::InlinedVector<std::pair<std::string, int>, 1> v;
+
+  v.shrink_to_fit();
+  EXPECT_EQ(v.capacity(), 1);
+
+  v.emplace_back("answer", 42);
+  v.shrink_to_fit();
+  EXPECT_EQ(v.capacity(), 1);
+
+  v.emplace_back("taxicab", 1729);
+  EXPECT_GE(v.capacity(), 2);
+  v.shrink_to_fit();
+  EXPECT_EQ(v.capacity(), 2);
+
+  v.reserve(100);
+  EXPECT_GE(v.capacity(), 100);
+  v.shrink_to_fit();
+  EXPECT_EQ(v.capacity(), 2);
+}
+
+TEST(InlinedVectorTest, ShrinkToFitEdgeCases) {
+  {
+    absl::InlinedVector<std::pair<std::string, int>, 1> v;
+    v.emplace_back("answer", 42);
+    v.emplace_back("taxicab", 1729);
+    EXPECT_GE(v.capacity(), 2);
+    v.pop_back();
+    v.shrink_to_fit();
+    EXPECT_EQ(v.capacity(), 1);
+    EXPECT_EQ(v[0].first, "answer");
+    EXPECT_EQ(v[0].second, 42);
+  }
+
+  {
+    absl::InlinedVector<std::string, 2> v(100);
+    v.resize(0);
+    v.shrink_to_fit();
+    EXPECT_EQ(v.capacity(), 2);  // inlined capacity
+  }
+
+  {
+    absl::InlinedVector<std::string, 2> v(100);
+    v.resize(1);
+    v.shrink_to_fit();
+    EXPECT_EQ(v.capacity(), 2);  // inlined capacity
+  }
+
+  {
+    absl::InlinedVector<std::string, 2> v(100);
+    v.resize(2);
+    v.shrink_to_fit();
+    EXPECT_EQ(v.capacity(), 2);
+  }
+
+  {
+    absl::InlinedVector<std::string, 2> v(100);
+    v.resize(3);
+    v.shrink_to_fit();
+    EXPECT_EQ(v.capacity(), 3);
+  }
+}
+
+TEST(IntVec, Insert) {
+  for (int len = 0; len < 20; len++) {
+    for (int pos = 0; pos <= len; pos++) {
+      {
+        // Single element
+        std::vector<int> std_v;
+        Fill(&std_v, len);
+        IntVec v;
+        Fill(&v, len);
+
+        std_v.insert(std_v.begin() +
pos, 9999); + IntVec::iterator it = v.insert(v.cbegin() + pos, 9999); + EXPECT_THAT(v, ElementsAreArray(std_v)); + EXPECT_EQ(it, v.cbegin() + pos); + } + { + // n elements + std::vector<int> std_v; + Fill(&std_v, len); + IntVec v; + Fill(&v, len); + + IntVec::size_type n = 5; + std_v.insert(std_v.begin() + pos, n, 9999); + IntVec::iterator it = v.insert(v.cbegin() + pos, n, 9999); + EXPECT_THAT(v, ElementsAreArray(std_v)); + EXPECT_EQ(it, v.cbegin() + pos); + } + { + // Iterator range (random access iterator) + std::vector<int> std_v; + Fill(&std_v, len); + IntVec v; + Fill(&v, len); + + const std::vector<int> input = {9999, 8888, 7777}; + std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend()); + IntVec::iterator it = + v.insert(v.cbegin() + pos, input.cbegin(), input.cend()); + EXPECT_THAT(v, ElementsAreArray(std_v)); + EXPECT_EQ(it, v.cbegin() + pos); + } + { + // Iterator range (forward iterator) + std::vector<int> std_v; + Fill(&std_v, len); + IntVec v; + Fill(&v, len); + + const std::forward_list<int> input = {9999, 8888, 7777}; + std_v.insert(std_v.begin() + pos, input.cbegin(), input.cend()); + IntVec::iterator it = + v.insert(v.cbegin() + pos, input.cbegin(), input.cend()); + EXPECT_THAT(v, ElementsAreArray(std_v)); + EXPECT_EQ(it, v.cbegin() + pos); + } + { + // Iterator range (input iterator) + std::vector<int> std_v; + Fill(&std_v, len); + IntVec v; + Fill(&v, len); + + std_v.insert(std_v.begin() + pos, {9999, 8888, 7777}); + std::istringstream input("9999 8888 7777"); + IntVec::iterator it = + v.insert(v.cbegin() + pos, std::istream_iterator<int>(input), + std::istream_iterator<int>()); + EXPECT_THAT(v, ElementsAreArray(std_v)); + EXPECT_EQ(it, v.cbegin() + pos); + } + { + // Initializer list + std::vector<int> std_v; + Fill(&std_v, len); + IntVec v; + Fill(&v, len); + + std_v.insert(std_v.begin() + pos, {9999, 8888}); + IntVec::iterator it = v.insert(v.cbegin() + pos, {9999, 8888}); + EXPECT_THAT(v, ElementsAreArray(std_v)); + EXPECT_EQ(it, v.cbegin() + pos); + } + } + } +} + +TEST(RefCountedVec, InsertConstructorDestructor) { + // Make sure the proper construction/destruction happen during insert + // operations. + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + for (int pos = 0; pos <= len; pos++) { + SCOPED_TRACE(pos); + std::vector<int> counts(len, 0); + int inserted_count = 0; + RefCountedVec v; + for (int i = 0; i < len; ++i) { + SCOPED_TRACE(i); + v.push_back(RefCounted(i, &counts[i])); + } + + EXPECT_THAT(counts, Each(Eq(1))); + + RefCounted insert_element(9999, &inserted_count); + EXPECT_EQ(1, inserted_count); + v.insert(v.begin() + pos, insert_element); + EXPECT_EQ(2, inserted_count); + // Check that the elements at the end are preserved. 
+ EXPECT_THAT(counts, Each(Eq(1))); + EXPECT_EQ(2, inserted_count); + } + } +} + +TEST(IntVec, Resize) { + for (int len = 0; len < 20; len++) { + IntVec v; + Fill(&v, len); + + // Try resizing up and down by k elements + static const int kResizeElem = 1000000; + for (int k = 0; k < 10; k++) { + // Enlarging resize + v.resize(len+k, kResizeElem); + EXPECT_EQ(len+k, v.size()); + EXPECT_LE(len+k, v.capacity()); + for (int i = 0; i < len+k; i++) { + if (i < len) { + EXPECT_EQ(i, v[i]); + } else { + EXPECT_EQ(kResizeElem, v[i]); + } + } + + // Shrinking resize + v.resize(len, kResizeElem); + EXPECT_EQ(len, v.size()); + EXPECT_LE(len, v.capacity()); + for (int i = 0; i < len; i++) { + EXPECT_EQ(i, v[i]); + } + } + } +} + +TEST(IntVec, InitWithLength) { + for (int len = 0; len < 20; len++) { + IntVec v(len, 7); + EXPECT_EQ(len, v.size()); + EXPECT_LE(len, v.capacity()); + for (int i = 0; i < len; i++) { + EXPECT_EQ(7, v[i]); + } + } +} + +TEST(IntVec, CopyConstructorAndAssignment) { + for (int len = 0; len < 20; len++) { + IntVec v; + Fill(&v, len); + EXPECT_EQ(len, v.size()); + EXPECT_LE(len, v.capacity()); + + IntVec v2(v); + EXPECT_TRUE(v == v2) << PrintToString(v) << PrintToString(v2); + + for (int start_len = 0; start_len < 20; start_len++) { + IntVec v3; + Fill(&v3, start_len, 99); // Add dummy elements that should go away + v3 = v; + EXPECT_TRUE(v == v3) << PrintToString(v) << PrintToString(v3); + } + } +} + +TEST(IntVec, AliasingCopyAssignment) { + for (int len = 0; len < 20; ++len) { + IntVec original; + Fill(&original, len); + IntVec dup = original; + dup = *&dup; + EXPECT_EQ(dup, original); + } +} + +TEST(IntVec, MoveConstructorAndAssignment) { + for (int len = 0; len < 20; len++) { + IntVec v_in; + const int inlined_capacity = v_in.capacity(); + Fill(&v_in, len); + EXPECT_EQ(len, v_in.size()); + EXPECT_LE(len, v_in.capacity()); + + { + IntVec v_temp(v_in); + auto* old_data = v_temp.data(); + IntVec v_out(std::move(v_temp)); + EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out); + if (v_in.size() > inlined_capacity) { + // Allocation is moved as a whole, data stays in place. + EXPECT_TRUE(v_out.data() == old_data); + } else { + EXPECT_FALSE(v_out.data() == old_data); + } + } + for (int start_len = 0; start_len < 20; start_len++) { + IntVec v_out; + Fill(&v_out, start_len, 99); // Add dummy elements that should go away + IntVec v_temp(v_in); + auto* old_data = v_temp.data(); + v_out = std::move(v_temp); + EXPECT_TRUE(v_in == v_out) << PrintToString(v_in) << PrintToString(v_out); + if (v_in.size() > inlined_capacity) { + // Allocation is moved as a whole, data stays in place. 
+ EXPECT_TRUE(v_out.data() == old_data); + } else { + EXPECT_FALSE(v_out.data() == old_data); + } + } + } +} + +class NotTriviallyDestructible { + public: + NotTriviallyDestructible() : p_(new int(1)) {} + explicit NotTriviallyDestructible(int i) : p_(new int(i)) {} + + NotTriviallyDestructible(const NotTriviallyDestructible& other) + : p_(new int(*other.p_)) {} + + NotTriviallyDestructible& operator=(const NotTriviallyDestructible& other) { + p_ = absl::make_unique<int>(*other.p_); + return *this; + } + + bool operator==(const NotTriviallyDestructible& other) const { + return *p_ == *other.p_; + } + + private: + std::unique_ptr<int> p_; +}; + +TEST(AliasingTest, Emplace) { + for (int i = 2; i < 20; ++i) { + absl::InlinedVector<NotTriviallyDestructible, 10> vec; + for (int j = 0; j < i; ++j) { + vec.push_back(NotTriviallyDestructible(j)); + } + vec.emplace(vec.begin(), vec[0]); + EXPECT_EQ(vec[0], vec[1]); + vec.emplace(vec.begin() + i / 2, vec[i / 2]); + EXPECT_EQ(vec[i / 2], vec[i / 2 + 1]); + vec.emplace(vec.end() - 1, vec.back()); + EXPECT_EQ(vec[vec.size() - 2], vec.back()); + } +} + +TEST(AliasingTest, InsertWithCount) { + for (int i = 1; i < 20; ++i) { + absl::InlinedVector<NotTriviallyDestructible, 10> vec; + for (int j = 0; j < i; ++j) { + vec.push_back(NotTriviallyDestructible(j)); + } + for (int n = 0; n < 5; ++n) { + // We use back where we can because it's guaranteed to become invalidated + vec.insert(vec.begin(), n, vec.back()); + auto b = vec.begin(); + EXPECT_TRUE( + std::all_of(b, b + n, [&vec](const NotTriviallyDestructible& x) { + return x == vec.back(); + })); + + auto m_idx = vec.size() / 2; + vec.insert(vec.begin() + m_idx, n, vec.back()); + auto m = vec.begin() + m_idx; + EXPECT_TRUE( + std::all_of(m, m + n, [&vec](const NotTriviallyDestructible& x) { + return x == vec.back(); + })); + + // We want distinct values so the equality test is meaningful, + // vec[vec.size() - 1] is also almost always invalidated. + auto old_e = vec.size() - 1; + auto val = vec[old_e]; + vec.insert(vec.end(), n, vec[old_e]); + auto e = vec.begin() + old_e; + EXPECT_TRUE(std::all_of( + e, e + n, + [&val](const NotTriviallyDestructible& x) { return x == val; })); + } + } +} + +TEST(OverheadTest, Storage) { + // Check for size overhead. + // In particular, ensure that std::allocator doesn't cost anything to store. + // The union should be absorbing some of the allocation bookkeeping overhead + // in the larger vectors, leaving only the size_ field as overhead. 
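// For the arithmetic below (editor's note; a plausible layout consistent with
// these numbers, not a guaranteed implementation): the vector is roughly
//   union { T inlined[N]; struct { T* ptr; size_t capacity; } allocation; };
//   /* plus one word holding the size and the inlined/allocated tag */
// For N >= 2 the union occupies N words, so total size is N + 1 words and the
// overhead is 1 word. For N == 1 the two-word allocation view is larger than
// the single inline slot, so total size is 3 words and the overhead is 2.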
+ EXPECT_EQ(2 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 1>) - 1 * sizeof(int*)); + EXPECT_EQ(1 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 2>) - 2 * sizeof(int*)); + EXPECT_EQ(1 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 3>) - 3 * sizeof(int*)); + EXPECT_EQ(1 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 4>) - 4 * sizeof(int*)); + EXPECT_EQ(1 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 5>) - 5 * sizeof(int*)); + EXPECT_EQ(1 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 6>) - 6 * sizeof(int*)); + EXPECT_EQ(1 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 7>) - 7 * sizeof(int*)); + EXPECT_EQ(1 * sizeof(int*), + sizeof(absl::InlinedVector<int*, 8>) - 8 * sizeof(int*)); +} + +TEST(IntVec, Clear) { + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + IntVec v; + Fill(&v, len); + v.clear(); + EXPECT_EQ(0, v.size()); + EXPECT_EQ(v.begin(), v.end()); + } +} + +TEST(IntVec, Reserve) { + for (int len = 0; len < 20; len++) { + IntVec v; + Fill(&v, len); + + for (int newlen = 0; newlen < 100; newlen++) { + const int* start_rep = v.data(); + v.reserve(newlen); + const int* final_rep = v.data(); + if (newlen <= len) { + EXPECT_EQ(start_rep, final_rep); + } + EXPECT_LE(newlen, v.capacity()); + + // Filling up to newlen should not change rep + while (v.size() < newlen) { + v.push_back(0); + } + EXPECT_EQ(final_rep, v.data()); + } + } +} + +TEST(StringVec, SelfRefPushBack) { + std::vector<std::string> std_v; + absl::InlinedVector<std::string, 4> v; + const std::string s = "A quite long std::string to ensure heap."; + std_v.push_back(s); + v.push_back(s); + for (int i = 0; i < 20; ++i) { + EXPECT_THAT(v, ElementsAreArray(std_v)); + + v.push_back(v.back()); + std_v.push_back(std_v.back()); + } + EXPECT_THAT(v, ElementsAreArray(std_v)); +} + +TEST(StringVec, SelfRefPushBackWithMove) { + std::vector<std::string> std_v; + absl::InlinedVector<std::string, 4> v; + const std::string s = "A quite long std::string to ensure heap."; + std_v.push_back(s); + v.push_back(s); + for (int i = 0; i < 20; ++i) { + EXPECT_EQ(v.back(), std_v.back()); + + v.push_back(std::move(v.back())); + std_v.push_back(std::move(std_v.back())); + } + EXPECT_EQ(v.back(), std_v.back()); +} + +TEST(StringVec, SelfMove) { + const std::string s = "A quite long std::string to ensure heap."; + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + absl::InlinedVector<std::string, 8> v; + for (int i = 0; i < len; ++i) { + SCOPED_TRACE(i); + v.push_back(s); + } + // Indirection necessary to avoid compiler warning. + v = std::move(*(&v)); + // Ensure that the inlined vector is still in a valid state by copying it. + // We don't expect specific contents since a self-move results in an + // unspecified valid state. 
+ std::vector<std::string> copy(v.begin(), v.end()); + } +} + +TEST(IntVec, Swap) { + for (int l1 = 0; l1 < 20; l1++) { + SCOPED_TRACE(l1); + for (int l2 = 0; l2 < 20; l2++) { + SCOPED_TRACE(l2); + IntVec a = Fill(l1, 0); + IntVec b = Fill(l2, 100); + { + using std::swap; + swap(a, b); + } + EXPECT_EQ(l1, b.size()); + EXPECT_EQ(l2, a.size()); + for (int i = 0; i < l1; i++) { + SCOPED_TRACE(i); + EXPECT_EQ(i, b[i]); + } + for (int i = 0; i < l2; i++) { + SCOPED_TRACE(i); + EXPECT_EQ(100 + i, a[i]); + } + } + } +} + +TYPED_TEST_P(InstanceTest, Swap) { + using Instance = TypeParam; + using InstanceVec = absl::InlinedVector<Instance, 8>; + for (int l1 = 0; l1 < 20; l1++) { + SCOPED_TRACE(l1); + for (int l2 = 0; l2 < 20; l2++) { + SCOPED_TRACE(l2); + InstanceTracker tracker; + InstanceVec a, b; + const size_t inlined_capacity = a.capacity(); + for (int i = 0; i < l1; i++) a.push_back(Instance(i)); + for (int i = 0; i < l2; i++) b.push_back(Instance(100+i)); + EXPECT_EQ(tracker.instances(), l1 + l2); + tracker.ResetCopiesMovesSwaps(); + { + using std::swap; + swap(a, b); + } + EXPECT_EQ(tracker.instances(), l1 + l2); + if (a.size() > inlined_capacity && b.size() > inlined_capacity) { + EXPECT_EQ(tracker.swaps(), 0); // Allocations are swapped. + EXPECT_EQ(tracker.moves(), 0); + } else if (a.size() <= inlined_capacity && b.size() <= inlined_capacity) { + EXPECT_EQ(tracker.swaps(), std::min(l1, l2)); + // TODO(bsamwel): This should use moves when the type is movable. + EXPECT_EQ(tracker.copies(), std::max(l1, l2) - std::min(l1, l2)); + } else { + // One is allocated and the other isn't. The allocation is transferred + // without copying elements, and the inlined instances are copied/moved. + EXPECT_EQ(tracker.swaps(), 0); + // TODO(bsamwel): This should use moves when the type is movable. 
+ EXPECT_EQ(tracker.copies(), std::min(l1, l2)); + } + + EXPECT_EQ(l1, b.size()); + EXPECT_EQ(l2, a.size()); + for (int i = 0; i < l1; i++) { + EXPECT_EQ(i, b[i].value()); + } + for (int i = 0; i < l2; i++) { + EXPECT_EQ(100 + i, a[i].value()); + } + } + } +} + +TEST(IntVec, EqualAndNotEqual) { + IntVec a, b; + EXPECT_TRUE(a == b); + EXPECT_FALSE(a != b); + + a.push_back(3); + EXPECT_FALSE(a == b); + EXPECT_TRUE(a != b); + + b.push_back(3); + EXPECT_TRUE(a == b); + EXPECT_FALSE(a != b); + + b.push_back(7); + EXPECT_FALSE(a == b); + EXPECT_TRUE(a != b); + + a.push_back(6); + EXPECT_FALSE(a == b); + EXPECT_TRUE(a != b); + + a.clear(); + b.clear(); + for (int i = 0; i < 100; i++) { + a.push_back(i); + b.push_back(i); + EXPECT_TRUE(a == b); + EXPECT_FALSE(a != b); + + b[i] = b[i] + 1; + EXPECT_FALSE(a == b); + EXPECT_TRUE(a != b); + + b[i] = b[i] - 1; // Back to before + EXPECT_TRUE(a == b); + EXPECT_FALSE(a != b); + } +} + +TEST(IntVec, RelationalOps) { + IntVec a, b; + EXPECT_FALSE(a < b); + EXPECT_FALSE(b < a); + EXPECT_FALSE(a > b); + EXPECT_FALSE(b > a); + EXPECT_TRUE(a <= b); + EXPECT_TRUE(b <= a); + EXPECT_TRUE(a >= b); + EXPECT_TRUE(b >= a); + b.push_back(3); + EXPECT_TRUE(a < b); + EXPECT_FALSE(b < a); + EXPECT_FALSE(a > b); + EXPECT_TRUE(b > a); + EXPECT_TRUE(a <= b); + EXPECT_FALSE(b <= a); + EXPECT_FALSE(a >= b); + EXPECT_TRUE(b >= a); +} + +TYPED_TEST_P(InstanceTest, CountConstructorsDestructors) { + using Instance = TypeParam; + using InstanceVec = absl::InlinedVector<Instance, 8>; + InstanceTracker tracker; + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + tracker.ResetCopiesMovesSwaps(); + + InstanceVec v; + const size_t inlined_capacity = v.capacity(); + for (int i = 0; i < len; i++) { + v.push_back(Instance(i)); + } + EXPECT_EQ(tracker.instances(), len); + EXPECT_GE(tracker.copies() + tracker.moves(), + len); // More due to reallocation. + tracker.ResetCopiesMovesSwaps(); + + // Enlarging resize() must construct some objects + tracker.ResetCopiesMovesSwaps(); + v.resize(len + 10, Instance(100)); + EXPECT_EQ(tracker.instances(), len + 10); + if (len <= inlined_capacity && len + 10 > inlined_capacity) { + EXPECT_EQ(tracker.copies() + tracker.moves(), 10 + len); + } else { + // Only specify a minimum number of copies + moves. We don't want to + // depend on the reallocation policy here. + EXPECT_GE(tracker.copies() + tracker.moves(), + 10); // More due to reallocation. 
+ } + + // Shrinking resize() must destroy some objects + tracker.ResetCopiesMovesSwaps(); + v.resize(len, Instance(100)); + EXPECT_EQ(tracker.instances(), len); + EXPECT_EQ(tracker.copies(), 0); + EXPECT_EQ(tracker.moves(), 0); + + // reserve() must not increase the number of initialized objects + SCOPED_TRACE("reserve"); + v.reserve(len+1000); + EXPECT_EQ(tracker.instances(), len); + EXPECT_EQ(tracker.copies() + tracker.moves(), len); + + // pop_back() and erase() must destroy one object + if (len > 0) { + tracker.ResetCopiesMovesSwaps(); + v.pop_back(); + EXPECT_EQ(tracker.instances(), len - 1); + EXPECT_EQ(tracker.copies(), 0); + EXPECT_EQ(tracker.moves(), 0); + + if (!v.empty()) { + tracker.ResetCopiesMovesSwaps(); + v.erase(v.begin()); + EXPECT_EQ(tracker.instances(), len - 2); + EXPECT_EQ(tracker.copies() + tracker.moves(), len - 2); + } + } + + tracker.ResetCopiesMovesSwaps(); + int instances_before_empty_erase = tracker.instances(); + v.erase(v.begin(), v.begin()); + EXPECT_EQ(tracker.instances(), instances_before_empty_erase); + EXPECT_EQ(tracker.copies() + tracker.moves(), 0); + } +} + +TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnCopyConstruction) { + using Instance = TypeParam; + using InstanceVec = absl::InlinedVector<Instance, 8>; + InstanceTracker tracker; + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + tracker.ResetCopiesMovesSwaps(); + + InstanceVec v; + for (int i = 0; i < len; i++) { + v.push_back(Instance(i)); + } + EXPECT_EQ(tracker.instances(), len); + EXPECT_GE(tracker.copies() + tracker.moves(), + len); // More due to reallocation. + tracker.ResetCopiesMovesSwaps(); + { // Copy constructor should create 'len' more instances. + InstanceVec v_copy(v); + EXPECT_EQ(tracker.instances(), len + len); + EXPECT_EQ(tracker.copies(), len); + EXPECT_EQ(tracker.moves(), 0); + } + EXPECT_EQ(tracker.instances(), len); + } +} + +TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveConstruction) { + using Instance = TypeParam; + using InstanceVec = absl::InlinedVector<Instance, 8>; + InstanceTracker tracker; + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + tracker.ResetCopiesMovesSwaps(); + + InstanceVec v; + const size_t inlined_capacity = v.capacity(); + for (int i = 0; i < len; i++) { + v.push_back(Instance(i)); + } + EXPECT_EQ(tracker.instances(), len); + EXPECT_GE(tracker.copies() + tracker.moves(), + len); // More due to reallocation. + tracker.ResetCopiesMovesSwaps(); + { + InstanceVec v_copy(std::move(v)); + if (len > inlined_capacity) { + // Allocation is moved as a whole. + EXPECT_EQ(tracker.instances(), len); + EXPECT_EQ(tracker.live_instances(), len); + // Tests an implementation detail, don't rely on this in your code. 
+ EXPECT_EQ(v.size(), 0); // NOLINT misc-use-after-move + EXPECT_EQ(tracker.copies(), 0); + EXPECT_EQ(tracker.moves(), 0); + } else { + EXPECT_EQ(tracker.instances(), len + len); + if (Instance::supports_move()) { + EXPECT_EQ(tracker.live_instances(), len); + EXPECT_EQ(tracker.copies(), 0); + EXPECT_EQ(tracker.moves(), len); + } else { + EXPECT_EQ(tracker.live_instances(), len + len); + EXPECT_EQ(tracker.copies(), len); + EXPECT_EQ(tracker.moves(), 0); + } + } + EXPECT_EQ(tracker.swaps(), 0); + } + } +} + +TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnAssignment) { + using Instance = TypeParam; + using InstanceVec = absl::InlinedVector<Instance, 8>; + InstanceTracker tracker; + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + for (int longorshort = 0; longorshort <= 1; ++longorshort) { + SCOPED_TRACE(longorshort); + tracker.ResetCopiesMovesSwaps(); + + InstanceVec longer, shorter; + for (int i = 0; i < len; i++) { + longer.push_back(Instance(i)); + shorter.push_back(Instance(i)); + } + longer.push_back(Instance(len)); + EXPECT_EQ(tracker.instances(), len + len + 1); + EXPECT_GE(tracker.copies() + tracker.moves(), + len + len + 1); // More due to reallocation. + + tracker.ResetCopiesMovesSwaps(); + if (longorshort) { + shorter = longer; + EXPECT_EQ(tracker.instances(), (len + 1) + (len + 1)); + EXPECT_GE(tracker.copies() + tracker.moves(), + len + 1); // More due to reallocation. + } else { + longer = shorter; + EXPECT_EQ(tracker.instances(), len + len); + EXPECT_EQ(tracker.copies() + tracker.moves(), len); + } + } + } +} + +TYPED_TEST_P(InstanceTest, CountConstructorsDestructorsOnMoveAssignment) { + using Instance = TypeParam; + using InstanceVec = absl::InlinedVector<Instance, 8>; + InstanceTracker tracker; + for (int len = 0; len < 20; len++) { + SCOPED_TRACE(len); + for (int longorshort = 0; longorshort <= 1; ++longorshort) { + SCOPED_TRACE(longorshort); + tracker.ResetCopiesMovesSwaps(); + + InstanceVec longer, shorter; + const int inlined_capacity = longer.capacity(); + for (int i = 0; i < len; i++) { + longer.push_back(Instance(i)); + shorter.push_back(Instance(i)); + } + longer.push_back(Instance(len)); + EXPECT_EQ(tracker.instances(), len + len + 1); + EXPECT_GE(tracker.copies() + tracker.moves(), + len + len + 1); // More due to reallocation. + + tracker.ResetCopiesMovesSwaps(); + int src_len; + if (longorshort) { + src_len = len + 1; + shorter = std::move(longer); + } else { + src_len = len; + longer = std::move(shorter); + } + if (src_len > inlined_capacity) { + // Allocation moved as a whole. + EXPECT_EQ(tracker.instances(), src_len); + EXPECT_EQ(tracker.live_instances(), src_len); + EXPECT_EQ(tracker.copies(), 0); + EXPECT_EQ(tracker.moves(), 0); + } else { + // Elements are all copied. + EXPECT_EQ(tracker.instances(), src_len + src_len); + if (Instance::supports_move()) { + EXPECT_EQ(tracker.copies(), 0); + EXPECT_EQ(tracker.moves(), src_len); + EXPECT_EQ(tracker.live_instances(), src_len); + } else { + EXPECT_EQ(tracker.copies(), src_len); + EXPECT_EQ(tracker.moves(), 0); + EXPECT_EQ(tracker.live_instances(), src_len + src_len); + } + } + EXPECT_EQ(tracker.swaps(), 0); + } + } +} + +TEST(CountElemAssign, SimpleTypeWithInlineBacking) { + for (size_t original_size = 0; original_size <= 5; ++original_size) { + SCOPED_TRACE(original_size); + // Original contents are [12345, 12345, ...] 
+ std::vector<int> original_contents(original_size, 12345); + + absl::InlinedVector<int, 2> v(original_contents.begin(), + original_contents.end()); + v.assign(2, 123); + EXPECT_THAT(v, AllOf(SizeIs(2), ElementsAre(123, 123))); + if (original_size <= 2) { + // If the original had inline backing, it should stay inline. + EXPECT_EQ(2, v.capacity()); + } + } +} + +TEST(CountElemAssign, SimpleTypeWithAllocation) { + for (size_t original_size = 0; original_size <= 5; ++original_size) { + SCOPED_TRACE(original_size); + // Original contents are [12345, 12345, ...] + std::vector<int> original_contents(original_size, 12345); + + absl::InlinedVector<int, 2> v(original_contents.begin(), + original_contents.end()); + v.assign(3, 123); + EXPECT_THAT(v, AllOf(SizeIs(3), ElementsAre(123, 123, 123))); + EXPECT_LE(v.size(), v.capacity()); + } +} + +TYPED_TEST_P(InstanceTest, CountElemAssignInlineBacking) { + using Instance = TypeParam; + for (size_t original_size = 0; original_size <= 5; ++original_size) { + SCOPED_TRACE(original_size); + // Original contents are [12345, 12345, ...] + std::vector<Instance> original_contents(original_size, Instance(12345)); + + absl::InlinedVector<Instance, 2> v(original_contents.begin(), + original_contents.end()); + v.assign(2, Instance(123)); + EXPECT_THAT(v, AllOf(SizeIs(2), ElementsAre(ValueIs(123), ValueIs(123)))); + if (original_size <= 2) { + // If the original had inline backing, it should stay inline. + EXPECT_EQ(2, v.capacity()); + } + } +} + +template <typename Instance> +void InstanceCountElemAssignWithAllocationTest() { + for (size_t original_size = 0; original_size <= 5; ++original_size) { + SCOPED_TRACE(original_size); + // Original contents are [12345, 12345, ...] + std::vector<Instance> original_contents(original_size, Instance(12345)); + + absl::InlinedVector<Instance, 2> v(original_contents.begin(), + original_contents.end()); + v.assign(3, Instance(123)); + EXPECT_THAT(v, + AllOf(SizeIs(3), + ElementsAre(ValueIs(123), ValueIs(123), ValueIs(123)))); + EXPECT_LE(v.size(), v.capacity()); + } +} +TEST(CountElemAssign, WithAllocationCopyableInstance) { + InstanceCountElemAssignWithAllocationTest<CopyableOnlyInstance>(); +} +TEST(CountElemAssign, WithAllocationCopyableMovableInstance) { + InstanceCountElemAssignWithAllocationTest<CopyableMovableInstance>(); +} + +TEST(RangedConstructor, SimpleType) { + std::vector<int> source_v = {4, 5, 6}; + // First try to fit in inline backing + absl::InlinedVector<int, 4> v(source_v.begin(), source_v.end()); + EXPECT_EQ(3, v.size()); + EXPECT_EQ(4, v.capacity()); // Indication that we're still on inlined storage + EXPECT_EQ(4, v[0]); + EXPECT_EQ(5, v[1]); + EXPECT_EQ(6, v[2]); + + // Now, force a re-allocate + absl::InlinedVector<int, 2> realloc_v(source_v.begin(), source_v.end()); + EXPECT_EQ(3, realloc_v.size()); + EXPECT_LT(2, realloc_v.capacity()); + EXPECT_EQ(4, realloc_v[0]); + EXPECT_EQ(5, realloc_v[1]); + EXPECT_EQ(6, realloc_v[2]); +} + +// Test for ranged constructors using Instance as the element type and +// SourceContainer as the source container type. 
+template <typename Instance, typename SourceContainer, int inlined_capacity> +void InstanceRangedConstructorTestForContainer() { + InstanceTracker tracker; + SourceContainer source_v = {Instance(0), Instance(1)}; + tracker.ResetCopiesMovesSwaps(); + absl::InlinedVector<Instance, inlined_capacity> v(source_v.begin(), + source_v.end()); + EXPECT_EQ(2, v.size()); + EXPECT_LT(1, v.capacity()); + EXPECT_EQ(0, v[0].value()); + EXPECT_EQ(1, v[1].value()); + EXPECT_EQ(tracker.copies(), 2); + EXPECT_EQ(tracker.moves(), 0); +} + +template <typename Instance, int inlined_capacity> +void InstanceRangedConstructorTestWithCapacity() { + // Test with const and non-const, random access and non-random-access sources. + // TODO(bsamwel): Test with an input iterator source. + { + SCOPED_TRACE("std::list"); + InstanceRangedConstructorTestForContainer<Instance, std::list<Instance>, + inlined_capacity>(); + { + SCOPED_TRACE("const std::list"); + InstanceRangedConstructorTestForContainer< + Instance, const std::list<Instance>, inlined_capacity>(); + } + { + SCOPED_TRACE("std::vector"); + InstanceRangedConstructorTestForContainer<Instance, std::vector<Instance>, + inlined_capacity>(); + } + { + SCOPED_TRACE("const std::vector"); + InstanceRangedConstructorTestForContainer< + Instance, const std::vector<Instance>, inlined_capacity>(); + } + } +} + +TYPED_TEST_P(InstanceTest, RangedConstructor) { + using Instance = TypeParam; + SCOPED_TRACE("capacity=1"); + InstanceRangedConstructorTestWithCapacity<Instance, 1>(); + SCOPED_TRACE("capacity=2"); + InstanceRangedConstructorTestWithCapacity<Instance, 2>(); +} + +TEST(RangedConstructor, ElementsAreConstructed) { + std::vector<std::string> source_v = {"cat", "dog"}; + + // Force expansion and re-allocation of v. Ensures that when the vector is + // expanded that new elements are constructed. + absl::InlinedVector<std::string, 1> v(source_v.begin(), source_v.end()); + EXPECT_EQ("cat", v[0]); + EXPECT_EQ("dog", v[1]); +} + +TEST(RangedAssign, SimpleType) { + // Test for all combinations of original sizes (empty and non-empty inline, + // and out of line) and target sizes. + for (size_t original_size = 0; original_size <= 5; ++original_size) { + SCOPED_TRACE(original_size); + // Original contents are [12345, 12345, ...] + std::vector<int> original_contents(original_size, 12345); + + for (size_t target_size = 0; target_size <= 5; ++target_size) { + SCOPED_TRACE(target_size); + + // New contents are [3, 4, ...] + std::vector<int> new_contents; + for (size_t i = 0; i < target_size; ++i) { + new_contents.push_back(i + 3); + } + + absl::InlinedVector<int, 3> v(original_contents.begin(), + original_contents.end()); + v.assign(new_contents.begin(), new_contents.end()); + + EXPECT_EQ(new_contents.size(), v.size()); + EXPECT_LE(new_contents.size(), v.capacity()); + if (target_size <= 3 && original_size <= 3) { + // Storage should stay inline when target size is small. + EXPECT_EQ(3, v.capacity()); + } + EXPECT_THAT(v, ElementsAreArray(new_contents)); + } + } +} + +// Returns true if lhs and rhs have the same value. +template <typename Instance> +static bool InstanceValuesEqual(const Instance& lhs, const Instance& rhs) { + return lhs.value() == rhs.value(); +} + +// Test for ranged assign() using Instance as the element type and +// SourceContainer as the source container type. 
+template <typename Instance, typename SourceContainer> +void InstanceRangedAssignTestForContainer() { + // Test for all combinations of original sizes (empty and non-empty inline, + // and out of line) and target sizes. + for (size_t original_size = 0; original_size <= 5; ++original_size) { + SCOPED_TRACE(original_size); + // Original contents are [12345, 12345, ...] + std::vector<Instance> original_contents(original_size, Instance(12345)); + + for (size_t target_size = 0; target_size <= 5; ++target_size) { + SCOPED_TRACE(target_size); + + // New contents are [3, 4, ...] + // Generate data using a non-const container, because SourceContainer + // itself may be const. + // TODO(bsamwel): Test with an input iterator. + std::vector<Instance> new_contents_in; + for (size_t i = 0; i < target_size; ++i) { + new_contents_in.push_back(Instance(i + 3)); + } + SourceContainer new_contents(new_contents_in.begin(), + new_contents_in.end()); + + absl::InlinedVector<Instance, 3> v(original_contents.begin(), + original_contents.end()); + v.assign(new_contents.begin(), new_contents.end()); + + EXPECT_EQ(new_contents.size(), v.size()); + EXPECT_LE(new_contents.size(), v.capacity()); + if (target_size <= 3 && original_size <= 3) { + // Storage should stay inline when target size is small. + EXPECT_EQ(3, v.capacity()); + } + EXPECT_TRUE(std::equal(v.begin(), v.end(), new_contents.begin(), + InstanceValuesEqual<Instance>)); + } + } +} + +TYPED_TEST_P(InstanceTest, RangedAssign) { + using Instance = TypeParam; + // Test with const and non-const, random access and non-random-access sources. + // TODO(bsamwel): Test with an input iterator source. + SCOPED_TRACE("std::list"); + InstanceRangedAssignTestForContainer<Instance, std::list<Instance>>(); + SCOPED_TRACE("const std::list"); + InstanceRangedAssignTestForContainer<Instance, const std::list<Instance>>(); + SCOPED_TRACE("std::vector"); + InstanceRangedAssignTestForContainer<Instance, std::vector<Instance>>(); + SCOPED_TRACE("const std::vector"); + InstanceRangedAssignTestForContainer<Instance, const std::vector<Instance>>(); +} + +TEST(InitializerListConstructor, SimpleTypeWithInlineBacking) { + EXPECT_THAT((absl::InlinedVector<int, 4>{4, 5, 6}), + AllOf(SizeIs(3), CapacityIs(4), ElementsAre(4, 5, 6))); +} + +TEST(InitializerListConstructor, SimpleTypeWithReallocationRequired) { + EXPECT_THAT((absl::InlinedVector<int, 2>{4, 5, 6}), + AllOf(SizeIs(3), CapacityIs(Gt(2)), ElementsAre(4, 5, 6))); +} + +TEST(InitializerListConstructor, DisparateTypesInList) { + EXPECT_THAT((absl::InlinedVector<int, 2>{-7, 8ULL}), ElementsAre(-7, 8)); + + EXPECT_THAT((absl::InlinedVector<std::string, 2>{"foo", std::string("bar")}), + ElementsAre("foo", "bar")); +} + +TEST(InitializerListConstructor, ComplexTypeWithInlineBacking) { + EXPECT_THAT((absl::InlinedVector<CopyableMovableInstance, 1>{ + CopyableMovableInstance(0)}), + AllOf(SizeIs(1), CapacityIs(1), ElementsAre(ValueIs(0)))); +} + +TEST(InitializerListConstructor, ComplexTypeWithReallocationRequired) { + EXPECT_THAT( + (absl::InlinedVector<CopyableMovableInstance, 1>{ + CopyableMovableInstance(0), CopyableMovableInstance(1)}), + AllOf(SizeIs(2), CapacityIs(Gt(1)), ElementsAre(ValueIs(0), ValueIs(1)))); +} + +TEST(InitializerListAssign, SimpleTypeFitsInlineBacking) { + for (size_t original_size = 0; original_size <= 4; ++original_size) { + SCOPED_TRACE(original_size); + + absl::InlinedVector<int, 2> v1(original_size, 12345); + const size_t original_capacity_v1 = v1.capacity(); + v1.assign({3}); + EXPECT_THAT( + v1, 
AllOf(SizeIs(1), CapacityIs(original_capacity_v1), ElementsAre(3))); + + absl::InlinedVector<int, 2> v2(original_size, 12345); + const size_t original_capacity_v2 = v2.capacity(); + v2 = {3}; + EXPECT_THAT( + v2, AllOf(SizeIs(1), CapacityIs(original_capacity_v2), ElementsAre(3))); + } +} + +TEST(InitializerListAssign, SimpleTypeDoesNotFitInlineBacking) { + for (size_t original_size = 0; original_size <= 4; ++original_size) { + SCOPED_TRACE(original_size); + absl::InlinedVector<int, 2> v1(original_size, 12345); + v1.assign({3, 4, 5}); + EXPECT_THAT(v1, AllOf(SizeIs(3), ElementsAre(3, 4, 5))); + EXPECT_LE(3, v1.capacity()); + + absl::InlinedVector<int, 2> v2(original_size, 12345); + v2 = {3, 4, 5}; + EXPECT_THAT(v2, AllOf(SizeIs(3), ElementsAre(3, 4, 5))); + EXPECT_LE(3, v2.capacity()); + } +} + +TEST(InitializerListAssign, DisparateTypesInList) { + absl::InlinedVector<int, 2> v_int1; + v_int1.assign({-7, 8ULL}); + EXPECT_THAT(v_int1, ElementsAre(-7, 8)); + + absl::InlinedVector<int, 2> v_int2; + v_int2 = {-7, 8ULL}; + EXPECT_THAT(v_int2, ElementsAre(-7, 8)); + + absl::InlinedVector<std::string, 2> v_string1; + v_string1.assign({"foo", std::string("bar")}); + EXPECT_THAT(v_string1, ElementsAre("foo", "bar")); + + absl::InlinedVector<std::string, 2> v_string2; + v_string2 = {"foo", std::string("bar")}; + EXPECT_THAT(v_string2, ElementsAre("foo", "bar")); +} + +TYPED_TEST_P(InstanceTest, InitializerListAssign) { + using Instance = TypeParam; + for (size_t original_size = 0; original_size <= 4; ++original_size) { + SCOPED_TRACE(original_size); + absl::InlinedVector<Instance, 2> v(original_size, Instance(12345)); + const size_t original_capacity = v.capacity(); + v.assign({Instance(3)}); + EXPECT_THAT(v, AllOf(SizeIs(1), CapacityIs(original_capacity), + ElementsAre(ValueIs(3)))); + } + for (size_t original_size = 0; original_size <= 4; ++original_size) { + SCOPED_TRACE(original_size); + absl::InlinedVector<Instance, 2> v(original_size, Instance(12345)); + v.assign({Instance(3), Instance(4), Instance(5)}); + EXPECT_THAT(v, AllOf(SizeIs(3), + ElementsAre(ValueIs(3), ValueIs(4), ValueIs(5)))); + EXPECT_LE(3, v.capacity()); + } +} + +REGISTER_TYPED_TEST_CASE_P(InstanceTest, Swap, CountConstructorsDestructors, + CountConstructorsDestructorsOnCopyConstruction, + CountConstructorsDestructorsOnMoveConstruction, + CountConstructorsDestructorsOnAssignment, + CountConstructorsDestructorsOnMoveAssignment, + CountElemAssignInlineBacking, RangedConstructor, + RangedAssign, InitializerListAssign); + +using InstanceTypes = + ::testing::Types<CopyableOnlyInstance, CopyableMovableInstance>; +INSTANTIATE_TYPED_TEST_CASE_P(InstanceTestOnTypes, InstanceTest, InstanceTypes); + +TEST(DynamicVec, DynamicVecCompiles) { + DynamicVec v; + (void)v; +} + +TEST(AllocatorSupportTest, Constructors) { + using MyAlloc = CountingAllocator<int>; + using AllocVec = absl::InlinedVector<int, 4, MyAlloc>; + const int ia[] = { 0, 1, 2, 3, 4, 5, 6, 7 }; + int64_t allocated = 0; + MyAlloc alloc(&allocated); + { AllocVec ABSL_ATTRIBUTE_UNUSED v; } + { AllocVec ABSL_ATTRIBUTE_UNUSED v(alloc); } + { AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc); } + { AllocVec ABSL_ATTRIBUTE_UNUSED v({1, 2, 3}, alloc); } + + AllocVec v2; + { AllocVec ABSL_ATTRIBUTE_UNUSED v(v2, alloc); } + { AllocVec ABSL_ATTRIBUTE_UNUSED v(std::move(v2), alloc); } +} + +TEST(AllocatorSupportTest, CountAllocations) { + using MyAlloc = CountingAllocator<int>; + using AllocVec = absl::InlinedVector<int, 4, MyAlloc>; + const int ia[] = { 0, 1, 2, 3, 4, 
5, 6, 7 }; + int64_t allocated = 0; + MyAlloc alloc(&allocated); + { + AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + 4, alloc); + EXPECT_THAT(allocated, 0); + } + EXPECT_THAT(allocated, 0); + { + AllocVec ABSL_ATTRIBUTE_UNUSED v(ia, ia + ABSL_ARRAYSIZE(ia), alloc); + EXPECT_THAT(allocated, v.size() * sizeof(int)); + } + EXPECT_THAT(allocated, 0); + { + AllocVec v(4, 1, alloc); + EXPECT_THAT(allocated, 0); + + int64_t allocated2 = 0; + MyAlloc alloc2(&allocated2); + AllocVec v2(v, alloc2); + EXPECT_THAT(allocated2, 0); + + int64_t allocated3 = 0; + MyAlloc alloc3(&allocated3); + AllocVec v3(std::move(v), alloc3); + EXPECT_THAT(allocated3, 0); + } + EXPECT_THAT(allocated, 0); + { + AllocVec v(8, 2, alloc); + EXPECT_THAT(allocated, v.size() * sizeof(int)); + + int64_t allocated2 = 0; + MyAlloc alloc2(&allocated2); + AllocVec v2(v, alloc2); + EXPECT_THAT(allocated2, v2.size() * sizeof(int)); + + int64_t allocated3 = 0; + MyAlloc alloc3(&allocated3); + AllocVec v3(std::move(v), alloc3); + EXPECT_THAT(allocated3, v3.size() * sizeof(int)); + } + EXPECT_EQ(allocated, 0); + { + // Test shrink_to_fit deallocations. + AllocVec v(8, 2, alloc); + EXPECT_EQ(allocated, 8 * sizeof(int)); + v.resize(5); + EXPECT_EQ(allocated, 8 * sizeof(int)); + v.shrink_to_fit(); + EXPECT_EQ(allocated, 5 * sizeof(int)); + v.resize(4); + EXPECT_EQ(allocated, 5 * sizeof(int)); + v.shrink_to_fit(); + EXPECT_EQ(allocated, 0); + } +} + +TEST(AllocatorSupportTest, SwapBothAllocated) { + using MyAlloc = CountingAllocator<int>; + using AllocVec = absl::InlinedVector<int, 4, MyAlloc>; + int64_t allocated1 = 0; + int64_t allocated2 = 0; + { + const int ia1[] = { 0, 1, 2, 3, 4, 5, 6, 7 }; + const int ia2[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8 }; + MyAlloc a1(&allocated1); + MyAlloc a2(&allocated2); + AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1); + AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2); + EXPECT_LT(v1.capacity(), v2.capacity()); + EXPECT_THAT(allocated1, v1.capacity() * sizeof(int)); + EXPECT_THAT(allocated2, v2.capacity() * sizeof(int)); + v1.swap(v2); + EXPECT_THAT(v1, ElementsAreArray(ia2)); + EXPECT_THAT(v2, ElementsAreArray(ia1)); + EXPECT_THAT(allocated1, v2.capacity() * sizeof(int)); + EXPECT_THAT(allocated2, v1.capacity() * sizeof(int)); + } + EXPECT_THAT(allocated1, 0); + EXPECT_THAT(allocated2, 0); +} + +TEST(AllocatorSupportTest, SwapOneAllocated) { + using MyAlloc = CountingAllocator<int>; + using AllocVec = absl::InlinedVector<int, 4, MyAlloc>; + int64_t allocated1 = 0; + int64_t allocated2 = 0; + { + const int ia1[] = { 0, 1, 2, 3, 4, 5, 6, 7 }; + const int ia2[] = { 0, 1, 2, 3 }; + MyAlloc a1(&allocated1); + MyAlloc a2(&allocated2); + AllocVec v1(ia1, ia1 + ABSL_ARRAYSIZE(ia1), a1); + AllocVec v2(ia2, ia2 + ABSL_ARRAYSIZE(ia2), a2); + EXPECT_THAT(allocated1, v1.capacity() * sizeof(int)); + EXPECT_THAT(allocated2, 0); + v1.swap(v2); + EXPECT_THAT(v1, ElementsAreArray(ia2)); + EXPECT_THAT(v2, ElementsAreArray(ia1)); + EXPECT_THAT(allocated1, v2.capacity() * sizeof(int)); + EXPECT_THAT(allocated2, 0); + EXPECT_TRUE(v2.get_allocator() == a1); + EXPECT_TRUE(v1.get_allocator() == a2); + } + EXPECT_THAT(allocated1, 0); + EXPECT_THAT(allocated2, 0); +} + +TEST(AllocatorSupportTest, ScopedAllocatorWorks) { + using StdVector = std::vector<int, CountingAllocator<int>>; + using MyAlloc = + std::scoped_allocator_adaptor<CountingAllocator<StdVector>>; + using AllocVec = absl::InlinedVector<StdVector, 4, MyAlloc>; + + int64_t allocated = 0; + AllocVec vec(MyAlloc{CountingAllocator<StdVector>{&allocated}}); + EXPECT_EQ(allocated, 
0); + + // This default constructs a vector<int>, but the allocator should pass itself + // into the vector<int>. + // The absl::InlinedVector does not allocate any memory. + // The vector<int> does not allocate any memory. + vec.resize(1); + EXPECT_EQ(allocated, 0); + + // We make vector<int> allocate memory. + // It must go through the allocator even though we didn't construct the + // vector directly. + vec[0].push_back(1); + EXPECT_EQ(allocated, sizeof(int) * 1); + + // Another allocating vector. + vec.push_back(vec[0]); + EXPECT_EQ(allocated, sizeof(int) * 2); + + // Overflow the inlined memory. + // The absl::InlinedVector will now allocate. + vec.resize(5); + EXPECT_EQ(allocated, sizeof(int) * 2 + sizeof(StdVector) * 8); + + // Adding one more in external mode should also work. + vec.push_back(vec[0]); + EXPECT_EQ(allocated, sizeof(int) * 3 + sizeof(StdVector) * 8); + + // And extending these should still work. + vec[0].push_back(1); + EXPECT_EQ(allocated, sizeof(int) * 4 + sizeof(StdVector) * 8); + + vec.clear(); + EXPECT_EQ(allocated, 0); +} + +} // anonymous namespace
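The tests above repeatedly distinguish inline from heap-backed state through capacity() and data(). A minimal standalone sketch of that transition follows; this is an editor's illustration against the documented absl::InlinedVector API, not part of the committed diff:

#include <cassert>

#include "absl/container/inlined_vector.h"

int main() {
  absl::InlinedVector<int, 4> v;   // storage for 4 ints lives inside v itself
  const int* inlined = v.data();   // points into the object, even when empty
  for (int i = 0; i < 4; ++i) v.push_back(i);
  assert(v.capacity() == 4);       // still on inline storage
  assert(v.data() == inlined);
  v.push_back(4);                  // fifth element exceeds the inline capacity
  assert(v.capacity() > 4);        // a heap allocation now backs the vector
  assert(v.data() != inlined);     // elements were moved off the object
  return 0;
}

Only the inequalities matter here; the exact grown capacity is an implementation detail that the tests above likewise avoid depending on.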
http://git-wip-us.apache.org/repos/asf/marmotta/blob/0eb556da/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.cc
----------------------------------------------------------------------
diff --git a/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.cc b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.cc
new file mode 100644
index 0000000..fe00aca
--- /dev/null
+++ b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.cc
@@ -0,0 +1,26 @@
+// Copyright 2017 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/container/internal/test_instance_tracker.h"
+
+namespace absl {
+namespace test_internal {
+int BaseCountedInstance::num_instances_ = 0;
+int BaseCountedInstance::num_live_instances_ = 0;
+int BaseCountedInstance::num_moves_ = 0;
+int BaseCountedInstance::num_copies_ = 0;
+int BaseCountedInstance::num_swaps_ = 0;
+
+}  // namespace test_internal
+}  // namespace absl

http://git-wip-us.apache.org/repos/asf/marmotta/blob/0eb556da/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.h
----------------------------------------------------------------------
diff --git a/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.h b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.h
new file mode 100644
index 0000000..cf8f3a5
--- /dev/null
+++ b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker.h
@@ -0,0 +1,220 @@
+// Copyright 2017 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef ABSL_CONTAINER_INTERNAL_TEST_INSTANCE_TRACKER_H_
+#define ABSL_CONTAINER_INTERNAL_TEST_INSTANCE_TRACKER_H_
+
+#include <cstdlib>
+#include <ostream>
+
+namespace absl {
+namespace test_internal {
+
+// A type that counts the number of occurrences of the type, the live
+// occurrences of the type, as well as the number of copies, moves, and swaps
+// that have occurred on the type. This is used as a base class for the
+// copyable, copyable+movable, and movable types below that are used in actual
+// tests. Use InstanceTracker in tests to track the number of instances.
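// Typical usage of the tracker defined below (editor's sketch, not part of
// the upstream header):
//   InstanceTracker tracker;
//   CopyableMovableInstance a(1);
//   CopyableMovableInstance b(std::move(a));
//   EXPECT_EQ(tracker.moves(), 1);
//   EXPECT_EQ(tracker.instances(), 2);       // both objects still exist
//   EXPECT_EQ(tracker.live_instances(), 1);  // only b holds a live value
// Counters are measured relative to the tracker's construction, and the
// tracker aborts the process if tracked instances outlive it.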
+class BaseCountedInstance {
+ public:
+  explicit BaseCountedInstance(int x) : value_(x) {
+    ++num_instances_;
+    ++num_live_instances_;
+  }
+  BaseCountedInstance(const BaseCountedInstance& x)
+      : value_(x.value_), is_live_(x.is_live_) {
+    ++num_instances_;
+    if (is_live_) ++num_live_instances_;
+    ++num_copies_;
+  }
+  BaseCountedInstance(BaseCountedInstance&& x)
+      : value_(x.value_), is_live_(x.is_live_) {
+    x.is_live_ = false;
+    ++num_instances_;
+    ++num_moves_;
+  }
+  ~BaseCountedInstance() {
+    --num_instances_;
+    if (is_live_) --num_live_instances_;
+  }
+
+  BaseCountedInstance& operator=(const BaseCountedInstance& x) {
+    value_ = x.value_;
+    if (is_live_) --num_live_instances_;
+    is_live_ = x.is_live_;
+    if (is_live_) ++num_live_instances_;
+    ++num_copies_;
+    return *this;
+  }
+  BaseCountedInstance& operator=(BaseCountedInstance&& x) {
+    value_ = x.value_;
+    if (is_live_) --num_live_instances_;
+    is_live_ = x.is_live_;
+    x.is_live_ = false;
+    ++num_moves_;
+    return *this;
+  }
+
+  int value() const {
+    if (!is_live_) std::abort();
+    return value_;
+  }
+
+  friend std::ostream& operator<<(std::ostream& o,
+                                  const BaseCountedInstance& v) {
+    return o << "[value:" << v.value() << "]";
+  }
+
+  // Implementation of efficient swap() that counts swaps.
+  static void SwapImpl(
+      BaseCountedInstance& lhs,    // NOLINT(runtime/references)
+      BaseCountedInstance& rhs) {  // NOLINT(runtime/references)
+    using std::swap;
+    swap(lhs.value_, rhs.value_);
+    swap(lhs.is_live_, rhs.is_live_);
+    ++BaseCountedInstance::num_swaps_;
+  }
+
+ private:
+  friend class InstanceTracker;
+
+  int value_;
+
+  // Indicates if the value is live, i.e., it hasn't been moved away from.
+  bool is_live_ = true;
+
+  // Number of instances.
+  static int num_instances_;
+
+  // Number of live instances (those that have not been moved away from).
+  static int num_live_instances_;
+
+  // Number of times that BaseCountedInstance objects were moved.
+  static int num_moves_;
+
+  // Number of times that BaseCountedInstance objects were copied.
+  static int num_copies_;
+
+  // Number of times that BaseCountedInstance objects were swapped.
+  static int num_swaps_;
+};
+
+// Helper to track the BaseCountedInstance instance counters. Expects that the
+// number of instances and live_instances are the same when it is constructed
+// and when it is destructed.
+class InstanceTracker {
+ public:
+  InstanceTracker()
+      : start_instances_(BaseCountedInstance::num_instances_),
+        start_live_instances_(BaseCountedInstance::num_live_instances_) {
+    ResetCopiesMovesSwaps();
+  }
+  ~InstanceTracker() {
+    if (instances() != 0) std::abort();
+    if (live_instances() != 0) std::abort();
+  }
+
+  // Returns the number of BaseCountedInstance instances, both those containing
+  // valid values and those moved away from, compared to when the
+  // InstanceTracker was constructed.
+  int instances() const {
+    return BaseCountedInstance::num_instances_ - start_instances_;
+  }
+
+  // Returns the number of live BaseCountedInstance instances compared to when
+  // the InstanceTracker was constructed.
+  int live_instances() const {
+    return BaseCountedInstance::num_live_instances_ - start_live_instances_;
+  }
+
+  // Returns the number of moves on BaseCountedInstance objects since
+  // construction or since the last call to ResetCopiesMovesSwaps().
+  int moves() const { return BaseCountedInstance::num_moves_ - start_moves_; }
+
+  // Returns the number of copies on BaseCountedInstance objects since
+  // construction or the last call to ResetCopiesMovesSwaps().
+ int copies() const { + return BaseCountedInstance::num_copies_ - start_copies_; + } + + // Returns the number of swaps on BaseCountedInstance objects since + // construction or the last call to ResetCopiesMovesSwaps(). + int swaps() const { return BaseCountedInstance::num_swaps_ - start_swaps_; } + + // Resets the base values for moves, copies and swaps to the current values, + // so that subsequent Get*() calls for moves, copies and swaps will compare to + // the situation at the point of this call. + void ResetCopiesMovesSwaps() { + start_moves_ = BaseCountedInstance::num_moves_; + start_copies_ = BaseCountedInstance::num_copies_; + start_swaps_ = BaseCountedInstance::num_swaps_; + } + + private: + int start_instances_; + int start_live_instances_; + int start_moves_; + int start_copies_; + int start_swaps_; +}; + +// Copyable, not movable. +class CopyableOnlyInstance : public BaseCountedInstance { + public: + explicit CopyableOnlyInstance(int x) : BaseCountedInstance(x) {} + CopyableOnlyInstance(const CopyableOnlyInstance& rhs) = default; + CopyableOnlyInstance& operator=(const CopyableOnlyInstance& rhs) = default; + + friend void swap(CopyableOnlyInstance& lhs, CopyableOnlyInstance& rhs) { + BaseCountedInstance::SwapImpl(lhs, rhs); + } + + static bool supports_move() { return false; } +}; + +// Copyable and movable. +class CopyableMovableInstance : public BaseCountedInstance { + public: + explicit CopyableMovableInstance(int x) : BaseCountedInstance(x) {} + CopyableMovableInstance(const CopyableMovableInstance& rhs) = default; + CopyableMovableInstance(CopyableMovableInstance&& rhs) = default; + CopyableMovableInstance& operator=(const CopyableMovableInstance& rhs) = + default; + CopyableMovableInstance& operator=(CopyableMovableInstance&& rhs) = default; + + friend void swap(CopyableMovableInstance& lhs, CopyableMovableInstance& rhs) { + BaseCountedInstance::SwapImpl(lhs, rhs); + } + + static bool supports_move() { return true; } +}; + +// Only movable, not default-constructible. +class MovableOnlyInstance : public BaseCountedInstance { + public: + explicit MovableOnlyInstance(int x) : BaseCountedInstance(x) {} + MovableOnlyInstance(MovableOnlyInstance&& other) = default; + MovableOnlyInstance& operator=(MovableOnlyInstance&& other) = default; + + friend void swap(MovableOnlyInstance& lhs, MovableOnlyInstance& rhs) { + BaseCountedInstance::SwapImpl(lhs, rhs); + } + + static bool supports_move() { return true; } +}; + +} // namespace test_internal +} // namespace absl + +#endif // ABSL_CONTAINER_INTERNAL_TEST_INSTANCE_TRACKER_H_ http://git-wip-us.apache.org/repos/asf/marmotta/blob/0eb556da/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc ---------------------------------------------------------------------- diff --git a/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc new file mode 100644 index 0000000..9efb677 --- /dev/null +++ b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc @@ -0,0 +1,160 @@ +// Copyright 2017 The Abseil Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
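Editorial note: for orientation before the full test file below, here is a minimal sketch of how these counters are typically consumed in a test. It is not part of this patch; it assumes only the types declared in the header above, and the test name is illustrative. The tracker reports deltas relative to its own construction, so it must be declared before the tracked objects (locals are destroyed in reverse order, and the tracker's destructor aborts if any tracked instance is still alive).

    #include <utility>

    #include "absl/container/internal/test_instance_tracker.h"
    #include "gtest/gtest.h"

    namespace {

    using absl::test_internal::CopyableMovableInstance;
    using absl::test_internal::InstanceTracker;

    // Hypothetical example: query the deltas after a copy and a move.
    TEST(InstanceTrackerSketch, CountsMovesAndCopies) {
      InstanceTracker tracker;
      CopyableMovableInstance a(42);
      CopyableMovableInstance b(a);             // one copy
      CopyableMovableInstance c(std::move(a));  // one move; 'a' no longer live
      EXPECT_EQ(3, tracker.instances());
      EXPECT_EQ(2, tracker.live_instances());
      EXPECT_EQ(1, tracker.copies());
      EXPECT_EQ(1, tracker.moves());
    }  // a, b, c are destroyed before tracker, so its destructor is satisfied

    }  // namespace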
http://git-wip-us.apache.org/repos/asf/marmotta/blob/0eb556da/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc
----------------------------------------------------------------------
diff --git a/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc
new file mode 100644
index 0000000..9efb677
--- /dev/null
+++ b/libraries/ostrich/backend/3rdparty/abseil/absl/container/internal/test_instance_tracker_test.cc
@@ -0,0 +1,160 @@
+// Copyright 2017 The Abseil Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "absl/container/internal/test_instance_tracker.h"
+
+#include "gtest/gtest.h"
+
+namespace {
+
+using absl::test_internal::CopyableMovableInstance;
+using absl::test_internal::CopyableOnlyInstance;
+using absl::test_internal::InstanceTracker;
+using absl::test_internal::MovableOnlyInstance;
+
+TEST(TestInstanceTracker, CopyableMovable) {
+  InstanceTracker tracker;
+  CopyableMovableInstance src(1);
+  EXPECT_EQ(1, src.value()) << src;
+  CopyableMovableInstance copy(src);
+  CopyableMovableInstance move(std::move(src));
+  EXPECT_EQ(1, tracker.copies());
+  EXPECT_EQ(1, tracker.moves());
+  EXPECT_EQ(0, tracker.swaps());
+  EXPECT_EQ(3, tracker.instances());
+  EXPECT_EQ(2, tracker.live_instances());
+  tracker.ResetCopiesMovesSwaps();
+
+  CopyableMovableInstance copy_assign(1);
+  copy_assign = copy;
+  CopyableMovableInstance move_assign(1);
+  move_assign = std::move(move);
+  EXPECT_EQ(1, tracker.copies());
+  EXPECT_EQ(1, tracker.moves());
+  EXPECT_EQ(0, tracker.swaps());
+  EXPECT_EQ(5, tracker.instances());
+  EXPECT_EQ(3, tracker.live_instances());
+  tracker.ResetCopiesMovesSwaps();
+
+  {
+    using std::swap;
+    swap(move_assign, copy);
+    swap(copy, move_assign);
+    EXPECT_EQ(2, tracker.swaps());
+    EXPECT_EQ(0, tracker.copies());
+    EXPECT_EQ(0, tracker.moves());
+    EXPECT_EQ(5, tracker.instances());
+    EXPECT_EQ(3, tracker.live_instances());
+  }
+}
+
+TEST(TestInstanceTracker, CopyableOnly) {
+  InstanceTracker tracker;
+  CopyableOnlyInstance src(1);
+  EXPECT_EQ(1, src.value()) << src;
+  CopyableOnlyInstance copy(src);
+  CopyableOnlyInstance copy2(std::move(src));  // NOLINT
+  EXPECT_EQ(2, tracker.copies());
+  EXPECT_EQ(0, tracker.moves());
+  EXPECT_EQ(3, tracker.instances());
+  EXPECT_EQ(3, tracker.live_instances());
+  tracker.ResetCopiesMovesSwaps();
+
+  CopyableOnlyInstance copy_assign(1);
+  copy_assign = copy;
+  CopyableOnlyInstance copy_assign2(1);
+  copy_assign2 = std::move(copy2);  // NOLINT
+  EXPECT_EQ(2, tracker.copies());
+  EXPECT_EQ(0, tracker.moves());
+  EXPECT_EQ(5, tracker.instances());
+  EXPECT_EQ(5, tracker.live_instances());
+  tracker.ResetCopiesMovesSwaps();
+
+  {
+    using std::swap;
+    swap(src, copy);
+    swap(copy, src);
+    EXPECT_EQ(2, tracker.swaps());
+    EXPECT_EQ(0, tracker.copies());
+    EXPECT_EQ(0, tracker.moves());
+    EXPECT_EQ(5, tracker.instances());
+    EXPECT_EQ(5, tracker.live_instances());
+  }
+}
+
+TEST(TestInstanceTracker, MovableOnly) {
+  InstanceTracker tracker;
+  MovableOnlyInstance src(1);
+  EXPECT_EQ(1, src.value()) << src;
+  MovableOnlyInstance move(std::move(src));
+  MovableOnlyInstance move_assign(2);
+  move_assign = std::move(move);
+  EXPECT_EQ(3, tracker.instances());
+  EXPECT_EQ(1, tracker.live_instances());
+  EXPECT_EQ(2, tracker.moves());
+  EXPECT_EQ(0, tracker.copies());
+  tracker.ResetCopiesMovesSwaps();
+
+  {
+    using std::swap;
+    MovableOnlyInstance other(2);
+    swap(move_assign, other);
+    swap(other, move_assign);
+    EXPECT_EQ(2, tracker.swaps());
+    EXPECT_EQ(0, tracker.copies());
+    EXPECT_EQ(0, tracker.moves());
+    EXPECT_EQ(4, tracker.instances());
+    EXPECT_EQ(2, tracker.live_instances());
+  }
+}
+
+TEST(TestInstanceTracker, ExistingInstances) {
+  CopyableMovableInstance uncounted_instance(1);
+  CopyableMovableInstance uncounted_live_instance(
+      std::move(uncounted_instance));
+  InstanceTracker tracker;
+  EXPECT_EQ(0, tracker.instances());
+  EXPECT_EQ(0, tracker.live_instances());
+  EXPECT_EQ(0, tracker.copies());
+  {
+    CopyableMovableInstance instance1(1);
+    EXPECT_EQ(1, tracker.instances());
+    EXPECT_EQ(1, tracker.live_instances());
+    EXPECT_EQ(0, tracker.copies());
+    EXPECT_EQ(0, tracker.moves());
+    {
+      InstanceTracker tracker2;
+      CopyableMovableInstance instance2(instance1);
+      CopyableMovableInstance instance3(std::move(instance2));
+      EXPECT_EQ(3, tracker.instances());
+      EXPECT_EQ(2, tracker.live_instances());
+      EXPECT_EQ(1, tracker.copies());
+      EXPECT_EQ(1, tracker.moves());
+      EXPECT_EQ(2, tracker2.instances());
+      EXPECT_EQ(1, tracker2.live_instances());
+      EXPECT_EQ(1, tracker2.copies());
+      EXPECT_EQ(1, tracker2.moves());
+    }
+    EXPECT_EQ(1, tracker.instances());
+    EXPECT_EQ(1, tracker.live_instances());
+    EXPECT_EQ(1, tracker.copies());
+    EXPECT_EQ(1, tracker.moves());
+  }
+  EXPECT_EQ(0, tracker.instances());
+  EXPECT_EQ(0, tracker.live_instances());
+  EXPECT_EQ(1, tracker.copies());
+  EXPECT_EQ(1, tracker.moves());
+}
+
+}  // namespace
http://git-wip-us.apache.org/repos/asf/marmotta/blob/0eb556da/libraries/ostrich/backend/3rdparty/abseil/absl/copts.bzl
----------------------------------------------------------------------
diff --git a/libraries/ostrich/backend/3rdparty/abseil/absl/copts.bzl b/libraries/ostrich/backend/3rdparty/abseil/absl/copts.bzl
new file mode 100644
index 0000000..20c9b61
--- /dev/null
+++ b/libraries/ostrich/backend/3rdparty/abseil/absl/copts.bzl
@@ -0,0 +1,155 @@
+"""absl specific copts.
+
+Flags specified here must not impact ABI. Code compiled with and without these
+opts will be linked together, and in some cases headers compiled with and
+without these options will be part of the same program.
+"""
+GCC_FLAGS = [
+    "-Wall",
+    "-Wextra",
+    "-Wcast-qual",
+    "-Wconversion-null",
+    "-Wmissing-declarations",
+    "-Woverlength-strings",
+    "-Wpointer-arith",
+    "-Wunused-local-typedefs",
+    "-Wunused-result",
+    "-Wvarargs",
+    "-Wvla",  # variable-length array
+    "-Wwrite-strings",
+    # Google style does not use unsigned integers, though STL containers
+    # have unsigned types.
+    "-Wno-sign-compare",
+]
+
+GCC_TEST_FLAGS = [
+    "-Wno-conversion-null",
+    "-Wno-missing-declarations",
+    "-Wno-sign-compare",
+    "-Wno-unused-function",
+    "-Wno-unused-parameter",
+    "-Wno-unused-private-field",
+]
+
+
+# Docs on single flags are preceded by a comment.
+# Docs on groups of flags are preceded by ###.
+
+LLVM_FLAGS = [
+    # All warnings are treated as errors by the implicit -Werror flag
+    "-Wall",
+    "-Wextra",
+    "-Weverything",
+    # Abseil does not support C++98
+    "-Wno-c++98-compat-pedantic",
+    # Turns off all implicit conversion warnings. Most are re-enabled below.
+    "-Wno-conversion",
+    "-Wno-covered-switch-default",
+    "-Wno-deprecated",
+    "-Wno-disabled-macro-expansion",
+    "-Wno-double-promotion",
+    ###
+    # Turned off as they include valid C++ code.
+    "-Wno-comma",
+    "-Wno-extra-semi",
+    "-Wno-packed",
+    "-Wno-padded",
+    ###
+    "-Wno-float-conversion",
+    "-Wno-float-equal",
+    "-Wno-format-nonliteral",
+    # Too aggressive: warns on Clang extensions enclosed in Clang-only
+    # compilation paths.
+    "-Wno-gcc-compat",
+    ###
+    # Some internal globals are necessary. Don't do this at home.
+ "-Wno-global-constructors", + "-Wno-exit-time-destructors", + ### + "-Wno-nested-anon-types", + "-Wno-non-modular-include-in-module", + "-Wno-old-style-cast", + # Warns on preferred usage of non-POD types such as string_view + "-Wno-range-loop-analysis", + "-Wno-reserved-id-macro", + "-Wno-shorten-64-to-32", + "-Wno-switch-enum", + "-Wno-thread-safety-negative", + "-Wno-undef", + "-Wno-unknown-warning-option", + "-Wno-unreachable-code", + # Causes warnings on include guards + "-Wno-unused-macros", + "-Wno-weak-vtables", + ### + # Implicit conversion warnings turned off by -Wno-conversion + # which are re-enabled below. + "-Wbitfield-enum-conversion", + "-Wbool-conversion", + "-Wconstant-conversion", + "-Wenum-conversion", + "-Wint-conversion", + "-Wliteral-conversion", + "-Wnon-literal-null-conversion", + "-Wnull-conversion", + "-Wobjc-literal-conversion", + "-Wno-sign-conversion", + "-Wstring-conversion", + ### +] + +LLVM_TEST_FLAGS = [ + "-Wno-c99-extensions", + "-Wno-missing-noreturn", + "-Wno-missing-prototypes", + "-Wno-null-conversion", + "-Wno-shadow", + "-Wno-shift-sign-overflow", + "-Wno-sign-compare", + "-Wno-unused-function", + "-Wno-unused-member-function", + "-Wno-unused-parameter", + "-Wno-unused-private-field", + "-Wno-unused-template", + "-Wno-used-but-marked-unused", + "-Wno-zero-as-null-pointer-constant", +] + +MSVC_FLAGS = [ + "/W3", + "/WX", + "/wd4005", # macro-redefinition + "/wd4068", # unknown pragma + "/wd4244", # conversion from 'type1' to 'type2', possible loss of data + "/wd4267", # conversion from 'size_t' to 'type', possible loss of data + "/wd4800", # forcing value to bool 'true' or 'false' (performance warning) + "/DNOMINMAX", # Don't define min and max macros (windows.h) + "/DWIN32_LEAN_AND_MEAN", # Don't bloat namespace with incompatible winsock versions. + "/D_CRT_SECURE_NO_WARNINGS", # Don't warn about usage of insecure C functions +] + +MSVC_TEST_FLAGS = [ + "/wd4018", # signed/unsigned mismatch + "/wd4101", # unreferenced local variable + "/wd4503", # decorated name length exceeded, name was truncated +] + +# /Wall with msvc includes unhelpful warnings such as C4711, C4710, ... +ABSL_DEFAULT_COPTS = select({ + "//absl:windows": MSVC_FLAGS, + "//absl:llvm_compiler": LLVM_FLAGS, + "//conditions:default": GCC_FLAGS, +}) + +# in absence of modules (--compiler=gcc or -c opt), cc_tests leak their copts +# to their (included header) dependencies and fail to build outside absl +ABSL_TEST_COPTS = ABSL_DEFAULT_COPTS + select({ + "//absl:windows": MSVC_TEST_FLAGS, + "//absl:llvm_compiler": LLVM_TEST_FLAGS, + "//conditions:default": GCC_TEST_FLAGS, +}) + +ABSL_EXCEPTIONS_FLAG = select({ + "//absl:windows": ["/U_HAS_EXCEPTIONS", "/D_HAS_EXCEPTIONS=1", "/EHsc"], + "//conditions:default": ["-fexceptions"], +}) http://git-wip-us.apache.org/repos/asf/marmotta/blob/0eb556da/libraries/ostrich/backend/3rdparty/abseil/absl/debugging/BUILD.bazel ---------------------------------------------------------------------- diff --git a/libraries/ostrich/backend/3rdparty/abseil/absl/debugging/BUILD.bazel b/libraries/ostrich/backend/3rdparty/abseil/absl/debugging/BUILD.bazel new file mode 100644 index 0000000..e1e7fce --- /dev/null +++ b/libraries/ostrich/backend/3rdparty/abseil/absl/debugging/BUILD.bazel @@ -0,0 +1,305 @@ +# +# Copyright 2017 The Abseil Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +load( + "//absl:copts.bzl", + "ABSL_DEFAULT_COPTS", + "ABSL_TEST_COPTS", +) + +package( + default_visibility = ["//visibility:public"], +) + +licenses(["notice"]) # Apache 2.0 + +cc_library( + name = "stacktrace", + srcs = [ + "stacktrace.cc", + ], + hdrs = ["stacktrace.h"], + copts = ABSL_DEFAULT_COPTS, + deps = [ + ":debugging_internal", + "//absl/base", + "//absl/base:core_headers", + ], +) + +cc_library( + name = "symbolize", + srcs = [ + "symbolize.cc", + "symbolize_elf.inc", + "symbolize_unimplemented.inc", + "symbolize_win32.inc", + ], + hdrs = [ + "internal/symbolize.h", + "symbolize.h", + ], + copts = ABSL_DEFAULT_COPTS, + deps = [ + ":debugging_internal", + ":demangle_internal", + "//absl/base", + "//absl/base:core_headers", + "//absl/base:malloc_internal", + ], +) + +cc_test( + name = "symbolize_test", + srcs = ["symbolize_test.cc"], + copts = ABSL_TEST_COPTS, + deps = [ + ":stack_consumption", + ":symbolize", + "//absl/base", + "//absl/base:core_headers", + "//absl/memory", + "@com_google_googletest//:gtest", + ], +) + +cc_library( + name = "examine_stack", + srcs = [ + "internal/examine_stack.cc", + ], + hdrs = [ + "internal/examine_stack.h", + ], + copts = ABSL_DEFAULT_COPTS, + visibility = ["//visibility:private"], + deps = [ + ":stacktrace", + ":symbolize", + "//absl/base", + "//absl/base:core_headers", + ], +) + +cc_library( + name = "failure_signal_handler", + srcs = ["failure_signal_handler.cc"], + hdrs = ["failure_signal_handler.h"], + copts = ABSL_DEFAULT_COPTS, + deps = [ + ":examine_stack", + ":stacktrace", + "//absl/base", + "//absl/base:config", + "//absl/base:core_headers", + ], +) + +cc_test( + name = "failure_signal_handler_test", + srcs = ["failure_signal_handler_test.cc"], + copts = ABSL_TEST_COPTS, + linkopts = select({ + "//absl:windows": [], + "//conditions:default": ["-pthread"], + }), + deps = [ + ":failure_signal_handler", + ":stacktrace", + ":symbolize", + "//absl/base", + "//absl/strings", + "@com_google_googletest//:gtest", + ], +) + +cc_library( + name = "debugging_internal", + srcs = [ + "internal/address_is_readable.cc", + "internal/elf_mem_image.cc", + "internal/vdso_support.cc", + ], + hdrs = [ + "internal/address_is_readable.h", + "internal/elf_mem_image.h", + "internal/stacktrace_aarch64-inl.inc", + "internal/stacktrace_arm-inl.inc", + "internal/stacktrace_config.h", + "internal/stacktrace_generic-inl.inc", + "internal/stacktrace_powerpc-inl.inc", + "internal/stacktrace_unimplemented-inl.inc", + "internal/stacktrace_win32-inl.inc", + "internal/stacktrace_x86-inl.inc", + "internal/vdso_support.h", + ], + copts = ABSL_DEFAULT_COPTS, + deps = [ + "//absl/base", + "//absl/base:dynamic_annotations", + ], +) + +cc_library( + name = "demangle_internal", + srcs = ["internal/demangle.cc"], + hdrs = ["internal/demangle.h"], + copts = ABSL_DEFAULT_COPTS, + deps = [ + "//absl/base", + "//absl/base:core_headers", + ], +) + +cc_test( + name = "demangle_test", + srcs = ["internal/demangle_test.cc"], + copts = ABSL_TEST_COPTS, + deps = [ + ":demangle_internal", + ":stack_consumption", + "//absl/base", + "//absl/base:core_headers", + 
"//absl/memory", + "@com_google_googletest//:gtest_main", + ], +) + +cc_library( + name = "leak_check", + srcs = select({ + # The leak checking interface depends on weak function + # declarations that may not necessarily have definitions. + # Windows doesn't support this, and ios requires + # guaranteed definitions for weak symbols. + "//absl:ios": [], + "//absl:windows": [], + "//conditions:default": [ + "leak_check.cc", + ], + }), + hdrs = select({ + "//absl:ios": [], + "//absl:windows": [], + "//conditions:default": ["leak_check.h"], + }), + deps = ["//absl/base:core_headers"], +) + +# Adding a dependency to leak_check_disable will disable +# sanitizer leak checking (asan/lsan) in a test without +# the need to mess around with build features. +cc_library( + name = "leak_check_disable", + srcs = ["leak_check_disable.cc"], + linkstatic = 1, + alwayslink = 1, +) + +# These targets exists for use in tests only, explicitly configuring the +# LEAK_SANITIZER macro. It must be linked with -fsanitize=leak for lsan. +ABSL_LSAN_LINKOPTS = select({ + "//absl:llvm_compiler": ["-fsanitize=leak"], + "//conditions:default": [], +}) + +cc_library( + name = "leak_check_api_enabled_for_testing", + testonly = 1, + srcs = ["leak_check.cc"], + hdrs = ["leak_check.h"], + copts = select({ + "//absl:llvm_compiler": ["-DLEAK_SANITIZER"], + "//conditions:default": [], + }), + visibility = ["//visibility:private"], +) + +cc_library( + name = "leak_check_api_disabled_for_testing", + testonly = 1, + srcs = ["leak_check.cc"], + hdrs = ["leak_check.h"], + copts = ["-ULEAK_SANITIZER"], + visibility = ["//visibility:private"], +) + +cc_test( + name = "leak_check_test", + srcs = ["leak_check_test.cc"], + copts = select({ + "//absl:llvm_compiler": ["-DABSL_EXPECT_LEAK_SANITIZER"], + "//conditions:default": [], + }), + linkopts = ABSL_LSAN_LINKOPTS, + deps = [ + ":leak_check_api_enabled_for_testing", + "//absl/base", + "@com_google_googletest//:gtest_main", + ], +) + +cc_test( + name = "leak_check_no_lsan_test", + srcs = ["leak_check_test.cc"], + copts = ["-UABSL_EXPECT_LEAK_SANITIZER"], + deps = [ + ":leak_check_api_disabled_for_testing", + "//absl/base", # for raw_logging + "@com_google_googletest//:gtest_main", + ], +) + +# Test that leak checking is skipped when lsan is enabled but +# ":leak_check_disable" is linked in. +# +# This test should fail in the absence of a dependency on ":leak_check_disable" +cc_test( + name = "disabled_leak_check_test", + srcs = ["leak_check_fail_test.cc"], + linkopts = ABSL_LSAN_LINKOPTS, + deps = [ + ":leak_check_api_enabled_for_testing", + ":leak_check_disable", + "//absl/base", + "@com_google_googletest//:gtest_main", + ], +) + +cc_library( + name = "stack_consumption", + testonly = 1, + srcs = ["internal/stack_consumption.cc"], + hdrs = ["internal/stack_consumption.h"], + copts = ABSL_DEFAULT_COPTS, + visibility = ["//visibility:private"], + deps = [ + "//absl/base", + "//absl/base:core_headers", + ], +) + +cc_test( + name = "stack_consumption_test", + srcs = ["internal/stack_consumption_test.cc"], + copts = ABSL_TEST_COPTS, + deps = [ + ":stack_consumption", + "//absl/base", + "//absl/base:core_headers", + "@com_google_googletest//:gtest_main", + ], +)

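Similarly, a hedged sketch of the small API behind the leak-check targets above (built only on platforms with weak-symbol support, per the select() in :leak_check). absl::HaveLeakSanitizer() and absl::IgnoreLeak() are declared in leak_check.h; the surrounding helper is hypothetical and only illustrates the intended usage.

    #include "absl/debugging/leak_check.h"

    // Illustrative helper (not part of the patch): allocate an object that is
    // intentionally never freed, without tripping LeakSanitizer.
    int* NewIgnoredLeak() {
      int* p = new int(7);
      if (absl::HaveLeakSanitizer()) {
        absl::IgnoreLeak(p);  // suppress the leak report for this allocation
      }
      return p;
    }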