diff --git a/src/corelib/tools/qarraydataops.h b/src/corelib/tools/qarraydataops.h
index 8c0ca8f2f4..362f2449dc 100644
--- a/src/corelib/tools/qarraydataops.h
+++ b/src/corelib/tools/qarraydataops.h
@@ -44,10 +44,13 @@
 #include
 #include
+#include
 
 #include
 #include
 #include
 #include
+#include
+#include
 
 QT_BEGIN_NAMESPACE
 
@@ -147,6 +150,7 @@ struct QArrayExceptionSafetyPrimitives
     {
         It *iter;
         It end;
+        It intermediate;
 
         Destructor(It &it) noexcept(std::is_nothrow_copy_constructible_v<It>)
            : iter(std::addressof(it)), end(it)
@@ -155,6 +159,10 @@ struct QArrayExceptionSafetyPrimitives
        {
            iter = std::addressof(end);
        }
+        void freeze() noexcept(std::is_nothrow_copy_constructible_v<It>)
+        {
+            intermediate = *iter; iter = std::addressof(intermediate);
+        }
        ~Destructor() noexcept(std::is_nothrow_destructible_v<T>)
        {
            // Step is either 1 or -1 and depends on the interposition of *iter
@@ -218,6 +226,9 @@ struct QArrayExceptionSafetyPrimitives
 // Tags for compile-time dispatch based on backwards vs forward growing policy
 struct GrowsForwardTag {};
 struct GrowsBackwardsTag {};
+template<typename> struct InverseTag;
+template<> struct InverseTag<GrowsForwardTag> { using tag = GrowsBackwardsTag; };
+template<> struct InverseTag<GrowsBackwardsTag> { using tag = GrowsForwardTag; };
 
 QT_WARNING_PUSH
 #if defined(Q_CC_GNU) && Q_CC_GNU >= 700
@@ -244,31 +255,8 @@ public:
         this->size = qsizetype(newSize);
     }
 
-    template<typename iterator>
-    void copyAppend(iterator b, iterator e, QtPrivate::IfIsForwardIterator<iterator> = true)
-    {
-        Q_ASSERT(this->isMutable() || b == e);
-        Q_ASSERT(!this->isShared() || b == e);
-        Q_ASSERT(std::distance(b, e) >= 0 && qsizetype(std::distance(b, e)) <= this->freeSpaceAtEnd());
-
-        T *iter = this->end();
-        for (; b != e; ++iter, ++b) {
-            new (iter) T(*b);
-            ++this->size;
-        }
-    }
-
-    void copyAppend(const T *b, const T *e)
-    { insert(this->end(), b, e); }
-
     void moveAppend(T *b, T *e)
-    { copyAppend(b, e); }
-
-    void copyAppend(size_t n, parameter_type t)
-    { insert(this->end(), n, t); }
-
-    template<typename... Args>
-    void emplaceBack(Args&&... args) { this->emplace(this->end(), T(std::forward<Args>(args)...)); }
+    { insert(this->end(), b, e); }
 
     void truncate(size_t newSize)
     {
@@ -397,9 +385,9 @@
         } else {
             // Preserve the value, because it might be a reference to some part of the moved chunk
             T t(std::forward<Args>(args)...);
 
+            auto oldBegin = this->begin();
             --this->ptr;
-
             ::memmove(static_cast<void *>(this->begin()), static_cast<void *>(oldBegin),
                       (where - oldBegin) * sizeof(T));
             *(where - 1) = t;
@@ -504,23 +492,6 @@ struct QGenericArrayOps
         } while (size_t(++this->size) != newSize);
     }
 
-    template<typename iterator>
-    void copyAppend(iterator b, iterator e, QtPrivate::IfIsForwardIterator<iterator> = true)
-    {
-        Q_ASSERT(this->isMutable() || b == e);
-        Q_ASSERT(!this->isShared() || b == e);
-        Q_ASSERT(std::distance(b, e) >= 0 && qsizetype(std::distance(b, e)) <= this->freeSpaceAtEnd());
-
-        T *iter = this->end();
-        for (; b != e; ++iter, ++b) {
-            new (iter) T(*b);
-            ++this->size;
-        }
-    }
-
-    void copyAppend(const T *b, const T *e)
-    { insert(this->end(), b, e); }
-
     void moveAppend(T *b, T *e)
     {
         Q_ASSERT(this->isMutable() || b == e);
@@ -537,15 +508,6 @@ struct QGenericArrayOps
         this->size += copier.move(b, e);
     }
 
-    void copyAppend(size_t n, parameter_type t)
-    { insert(this->end(), n, t); }
-
-    template<typename... Args>
-    void emplaceBack(Args&&... args)
-    {
-        this->emplace(this->end(), std::forward<Args>(args)...);
-    }
-
     void truncate(size_t newSize)
     {
         Q_ASSERT(this->isMutable());
@@ -854,7 +816,6 @@ struct QGenericArrayOps
         } while (b != e);
     }
 
-
     void assign(T *b, T *e, parameter_type t)
     {
         Q_ASSERT(b <= e);
@@ -1054,6 +1015,267 @@ struct QCommonArrayOps : QArrayOpsSelector<T>::Type
     using iterator = typename Base::iterator;
     using const_iterator = typename Base::const_iterator;
 
+private:
+    using Self = QCommonArrayOps<T>;
+
+    // Tag dispatched helper functions
+    inline void adjustPointer(GrowsBackwardsTag, size_t distance) noexcept
+    {
+        this->ptr -= distance;
+    }
+    inline void adjustPointer(GrowsForwardTag, size_t distance) noexcept
+    {
+        this->ptr += distance;
+    }
+    qsizetype freeSpace(GrowsBackwardsTag) const noexcept { return this->freeSpaceAtBegin(); }
+    qsizetype freeSpace(GrowsForwardTag) const noexcept { return this->freeSpaceAtEnd(); }
+
+    struct RelocatableMoveOps
+    {
+        // The necessary evil. Performs move "to the left" when grows backwards and
+        // move "to the right" when grows forward
+        template<typename GrowthTag>
+        static void moveInGrowthDirection(GrowthTag tag, Self *this_, size_t futureGrowth)
+        {
+            Q_ASSERT(this_->isMutable());
+            Q_ASSERT(!this_->isShared());
+            Q_ASSERT(futureGrowth <= size_t(this_->freeSpace(tag)));
+
+            const auto oldBegin = this_->begin();
+            this_->adjustPointer(tag, futureGrowth);
+
+            // Note: move all elements!
+            ::memmove(static_cast<void *>(this_->begin()), static_cast<const void *>(oldBegin),
+                      this_->size * sizeof(T));
+        }
+    };
+
+    struct GenericMoveOps
+    {
+        template<typename... Args>
+        static void createInPlace(T *where, Args&&... args)
+        {
+            new (where) T(std::forward<Args>(args)...);
+        }
+
+        template<typename It, typename... Args>
+        static void createInPlace(std::reverse_iterator<It> where, Args&&... args)
+        {
+            // Note: instead of std::addressof(*where)
+            createInPlace(where.base() - 1, std::forward<Args>(args)...);
+        }
+
+        // Moves non-pod data range. Handles overlapping regions. By default, expect
+        // this method to perform move to the _right_. When move to the _left_ is
+        // needed, use reverse iterators.
+        template<typename GrowthTag, typename It>
+        static void moveNonPod(GrowthTag, Self *this_, It where, It begin, It end)
+        {
+            Q_ASSERT(begin <= end);
+            Q_ASSERT(where > begin);  // move to the right
+
+            using Destructor = typename QArrayExceptionSafetyPrimitives<T>::template Destructor<It>;
+
+            auto start = where + std::distance(begin, end);
+            auto e = end;
+
+            Destructor destroyer(start);  // Keep track of added items
+
+            auto [oldRangeEnd, overlapStart] = std::minmax(where, end);
+
+            // step 1. move-initialize elements in uninitialized memory region
+            while (start != overlapStart) {
+                --e;
+                createInPlace(start - 1, std::move_if_noexcept(*e));
+                // change tracked iterator only after creation succeeded - avoid
+                // destructing partially constructed objects if exception thrown
+                --start;
+            }
+
+            // step 2. move assign over existing elements in the overlapping
+            //         region (if there's an overlap)
+            while (e != begin) {
+                --start, --e;
+                *start = std::move_if_noexcept(*e);
+            }
+
+            // re-created the range. now there is an initialized memory region
+            // somewhere in the allocated area. if something goes wrong, we must
+            // clean it up, so "freeze" the position for now (cannot commit yet)
+            destroyer.freeze();
+
+            // step 3. destroy elements in the old range
+            const qsizetype originalSize = this_->size;
+            start = oldRangeEnd;  // mind the possible gap, have to re-assign
+            while (start != begin) {
+                // Exceptions or not, dtor called once per instance
+                --this_->size;
+                (--start)->~T();
+            }
+
+            destroyer.commit();
+            // restore old size as we consider data move to be done, the pointer
+            // still has to be adjusted!
+            this_->size = originalSize;
+        }
+
+        // Super inefficient function. The necessary evil. Performs move "to
+        // the left" when grows backwards and move "to the right" when grows
+        // forward
+        template<typename GrowthTag>
+        static void moveInGrowthDirection(GrowthTag tag, Self *this_, size_t futureGrowth)
+        {
+            Q_ASSERT(this_->isMutable());
+            Q_ASSERT(!this_->isShared());
+            Q_ASSERT(futureGrowth <= size_t(this_->freeSpace(tag)));
+
+            if (futureGrowth == 0)  // avoid doing anything if there's no need
+                return;
+
+            // Note: move all elements!
+            if constexpr (std::is_same_v<std::decay_t<GrowthTag>, GrowsBackwardsTag>) {
+                auto where = this_->begin() - futureGrowth;
+                // here, magic happens. instead of having move to the right, we'll
+                // have move to the left by using reverse iterators
+                moveNonPod(tag, this_,
+                           std::make_reverse_iterator(where + this_->size),  // rwhere
+                           std::make_reverse_iterator(this_->end()),  // rbegin
+                           std::make_reverse_iterator(this_->begin()));  // rend
+                this_->ptr = where;
+            } else {
+                auto where = this_->begin() + futureGrowth;
+                moveNonPod(tag, this_, where, this_->begin(), this_->end());
+                this_->ptr = where;
+            }
+        }
+    };
+
+    // Moves all elements in a specific direction by moveSize if available
+    // free space at one of the ends is smaller than required. Free space
+    // becomes available at the beginning if grows backwards and at the end
+    // if grows forward
+    template<typename GrowthTag>
+    qsizetype prepareFreeSpace(GrowthTag tag, size_t required, size_t moveSize)
+    {
+        Q_ASSERT(this->isMutable() || required == 0);
+        Q_ASSERT(!this->isShared() || required == 0);
+        Q_ASSERT(required <= this->constAllocatedCapacity() - this->size);
+
+        using MoveOps = std::conditional_t<QTypeInfo<T>::isRelocatable,
+                                           RelocatableMoveOps,
+                                           GenericMoveOps>;
+
+        // if free space at the end is not enough, we need to move the data,
+        // move is performed in an inverse direction
+        if (size_t(freeSpace(tag)) < required) {
+            using MoveTag = typename InverseTag<std::decay_t<GrowthTag>>::tag;
+            MoveOps::moveInGrowthDirection(MoveTag{}, this, moveSize);
+
+            if constexpr (std::is_same_v<MoveTag, GrowsBackwardsTag>) {
+                return -qsizetype(moveSize);  // moving data to the left
+            } else {
+                return qsizetype(moveSize);  // moving data to the right
+            }
+        }
+        return 0;
+    }
+
+    // Helper wrapper that adjusts passed iterators along with moving the data
+    // around. The adjustment is necessary when iterators point inside the
+    // to-be-moved range
+    template<typename GrowthTag, typename It>
+    void prepareFreeSpace(GrowthTag tag, size_t required, size_t moveSize, It &b, It &e) {
+        // Returns whether passed iterators are inside [this->begin(), this->end()]
+        const auto iteratorsInRange = [&] (const It &first, const It &last) {
+            using DecayedIt = std::decay_t<It>;
+            using RemovedConstVolatileIt = std::remove_cv_t<It>;
+            constexpr bool selfIterator =
+                // if passed type is an iterator type:
+                std::is_same_v<DecayedIt, iterator>
+                || std::is_same_v<DecayedIt, const_iterator>
+                // if passed type is a pointer type:
+                || std::is_same_v<RemovedConstVolatileIt, T *>
+                || std::is_same_v<RemovedConstVolatileIt, const T *>
+                || std::is_same_v<RemovedConstVolatileIt, const volatile T *>;
+            if constexpr (selfIterator) {
+                return (first >= this->begin() && last <= this->end());
+            }
+            return false;
+        };
+
+        const bool inRange = iteratorsInRange(b, e);
+        const auto diff = prepareFreeSpace(tag, required, moveSize);
+        if (inRange) {
+            std::advance(b, diff);
+            std::advance(e, diff);
+        }
+    }
+
+    size_t moveSizeForAppend(size_t)
+    {
+        // Qt5 QList in append: make 100% free space at end if not enough space
+        // Now:
+        return this->freeSpaceAtBegin();
+    }
+
+    size_t moveSizeForPrepend(size_t required)
+    {
+        // Qt5 QList in prepend: make 33% of all space at front if not enough space
+        // Now:
+        qsizetype space = this->allocatedCapacity() / 3;
+        space = qMax(space, qsizetype(required));  // in case required > 33% of all space
+        return qMin(space, this->freeSpaceAtEnd());
+    }
+
+    // Helper functions that reduce usage boilerplate
+    void prepareSpaceForAppend(size_t required)
+    {
+        prepareFreeSpace(GrowsForwardTag{}, required, moveSizeForAppend(required));
+    }
+    void prepareSpaceForPrepend(size_t required)
+    {
+        prepareFreeSpace(GrowsBackwardsTag{}, required, moveSizeForPrepend(required));
+    }
+    template<typename It>
+    void prepareSpaceForAppend(It &b, It &e, size_t required)
+    {
+        prepareFreeSpace(GrowsForwardTag{}, required, moveSizeForAppend(required), b, e);
+    }
+    template<typename It>
+    void prepareSpaceForPrepend(It &b, It &e, size_t required)
+    {
+        prepareFreeSpace(GrowsBackwardsTag{}, required, moveSizeForPrepend(required), b, e);
+    }
+
+    // Tells how much of the given size to insert at the beginning of the
+    // container. This is insert-specific helper function
+    qsizetype sizeToInsertAtBegin(const T *const where, qsizetype size)
+    {
+        Q_ASSERT(size_t(size) <= this->allocatedCapacity() - this->size);
+        Q_ASSERT(where >= this->begin() && where <= this->end());  // in range
+
+        const auto freeAtBegin = this->freeSpaceAtBegin();
+        const auto freeAtEnd = this->freeSpaceAtEnd();
+
+        // Idea: * if enough space on both sides, try to affect less elements
+        //       * if enough space on one of the sides, use only that side
+        //       * otherwise, split between front and back (worst case)
+        if (freeAtBegin >= size && freeAtEnd >= size) {
+            if (where - this->begin() < this->end() - where) {
+                return size;
+            } else {
+                return 0;
+            }
+        } else if (freeAtBegin >= size) {
+            return size;
+        } else if (freeAtEnd >= size) {
+            return 0;
+        } else {
+            return size - freeAtEnd;
+        }
+    }
+
+public:
     // Returns whether reallocation is desirable before adding more elements
     // into the container. This is a helper function that one can use to
     // theoretically improve average operations performance. Ignoring this
@@ -1088,6 +1310,177 @@ struct QCommonArrayOps : QArrayOpsSelector<T>::Type
         // Now: no free space OR not enough space on either of the sides (bad perf. case)
         return (freeAtBegin + freeAtEnd) < n || (freeAtBegin < n && freeAtEnd < n);
     }
+
+    // using Base::truncate;
+    // using Base::destroyAll;
+    // using Base::assign;
+    // using Base::compare;
+
+    void appendInitialize(size_t newSize)
+    {
+        Q_ASSERT(this->isMutable());
+        Q_ASSERT(!this->isShared());
+        Q_ASSERT(newSize > size_t(this->size));
+        Q_ASSERT(newSize <= this->allocatedCapacity());
+
+        // Since this is mostly an initialization function, do not follow append
+        // logic of space arrangement. Instead, only prepare as much free space
+        // as needed for this specific operation
+        const size_t n = newSize - this->size;
+        prepareFreeSpace(GrowsForwardTag{}, n,
+                         qMin(n, size_t(this->freeSpaceAtBegin())));  // ### perf. loss
+
+        Base::appendInitialize(newSize);
+    }
+
+    void copyAppend(const T *b, const T *e)
+    {
+        Q_ASSERT(this->isMutable() || b == e);
+        Q_ASSERT(!this->isShared() || b == e);
+        Q_ASSERT(b <= e);
+        Q_ASSERT(size_t(e - b) <= this->allocatedCapacity() - this->size);
+
+        prepareSpaceForAppend(b, e, e - b);  // ### perf. loss
+        Base::insert(GrowsForwardTag{}, this->end(), b, e);
+    }
+
+    template<typename It>
+    void copyAppend(It b, It e, QtPrivate::IfIsForwardIterator<It> = true,
+                    QtPrivate::IfIsNotSame<std::decay_t<It>, iterator> = true,
+                    QtPrivate::IfIsNotSame<std::decay_t<It>, const_iterator> = true)
+    {
+        Q_ASSERT(this->isMutable() || b == e);
+        Q_ASSERT(!this->isShared() || b == e);
+        const qsizetype distance = std::distance(b, e);
+        Q_ASSERT(distance >= 0 && size_t(distance) <= this->allocatedCapacity() - this->size);
+
+        prepareSpaceForAppend(b, e, distance);  // ### perf. loss
+
+        T *iter = this->end();
+        for (; b != e; ++iter, ++b) {
+            new (iter) T(*b);
+            ++this->size;
+        }
+    }
+
+    void moveAppend(T *b, T *e)
+    {
+        Q_ASSERT(this->isMutable() || b == e);
+        Q_ASSERT(!this->isShared() || b == e);
+        Q_ASSERT(b <= e);
+        Q_ASSERT(size_t(e - b) <= this->allocatedCapacity() - this->size);
+
+        prepareSpaceForAppend(b, e, e - b);  // ### perf. loss
+        Base::moveAppend(b, e);
+    }
+
+    void copyAppend(size_t n, parameter_type t)
+    {
+        Q_ASSERT(!this->isShared() || n == 0);
+        Q_ASSERT(this->allocatedCapacity() - size_t(this->size) >= n);
+
+        // Preserve the value, because it might be a reference to some part of the moved chunk
+        T tmp(t);
+        prepareSpaceForAppend(n);  // ### perf. loss
+        Base::insert(GrowsForwardTag{}, this->end(), n, tmp);
+    }
+
+    void insert(T *where, const T *b, const T *e)
+    {
+        Q_ASSERT(this->isMutable() || (b == e && where == this->end()));
+        Q_ASSERT(!this->isShared() || (b == e && where == this->end()));
+        Q_ASSERT(where >= this->begin() && where <= this->end());
+        Q_ASSERT(b <= e);
+        Q_ASSERT(e <= where || b > this->end() || where == this->end());  // No overlap or append
+        Q_ASSERT(size_t(e - b) <= this->allocatedCapacity() - this->size);
+
+        if (where == this->begin()) {  // prepend case - special space arrangement
+            prepareSpaceForPrepend(b, e, e - b);  // ### perf. loss
+            Base::insert(GrowsBackwardsTag{}, this->begin(), b, e);
+            return;
+        } else if (where == this->end()) {  // append case - special space arrangement
+            copyAppend(b, e);
+            return;
+        }
+
+        // Insert elements based on the divided distance. Good case: only 1
+        // insert happens (either to the front part or to the back part). Bad
+        // case: both inserts happen, meaning that we touch all N elements in
+        // the container (this should be handled "outside" by ensuring enough
+        // free space by reallocating more frequently)
+        const auto k = sizeToInsertAtBegin(where, e - b);
+        Base::insert(GrowsBackwardsTag{}, where, b, b + k);
+        Base::insert(GrowsForwardTag{}, where, b + k, e);
+    }
+
+    void insert(T *where, size_t n, parameter_type t)
+    {
+        Q_ASSERT(!this->isShared() || (n == 0 && where == this->end()));
+        Q_ASSERT(where >= this->begin() && where <= this->end());
+        Q_ASSERT(this->allocatedCapacity() - size_t(this->size) >= n);
+
+        if (where == this->begin()) {  // prepend case - special space arrangement
+            // Preserve the value, because it might be a reference to some part of the moved chunk
+            T tmp(t);
+            prepareSpaceForPrepend(n);  // ### perf. loss
+            Base::insert(GrowsBackwardsTag{}, this->begin(), n, tmp);
+            return;
+        } else if (where == this->end()) {  // append case - special space arrangement
+            copyAppend(n, t);
+            return;
+        }
+
+        // Insert elements based on the divided distance. Good case: only 1
+        // insert happens (either to the front part or to the back part). Bad
+        // case: both inserts happen, meaning that we touch all N elements in
+        // the container (this should be handled "outside" by ensuring enough
+        // free space by reallocating more frequently)
+        const auto beginSize = sizeToInsertAtBegin(where, qsizetype(n));
+        Base::insert(GrowsBackwardsTag{}, where, beginSize, t);
+        Base::insert(GrowsForwardTag{}, where, qsizetype(n) - beginSize, t);
+    }
+
+    template<typename... Args>
+    void emplace(iterator where, Args&&... args)
+    {
+        Q_ASSERT(!this->isShared());
+        Q_ASSERT(where >= this->begin() && where <= this->end());
+        Q_ASSERT(this->allocatedCapacity() - this->size >= 1);
+
+        // Qt5 QList in insert(1): try to move less data around
+        // Now:
+        const bool shouldInsertAtBegin = (where - this->begin()) < (this->end() - where)
+                                         || this->freeSpaceAtEnd() <= 0;
+        if (this->freeSpaceAtBegin() > 0 && shouldInsertAtBegin) {
+            Base::emplace(GrowsBackwardsTag{}, where, std::forward<Args>(args)...);
+        } else {
+            Base::emplace(GrowsForwardTag{}, where, std::forward<Args>(args)...);
+        }
+    }
+
+    template<typename... Args>
+    void emplaceBack(Args&&... args)
+    {
+        this->emplace(this->end(), std::forward<Args>(args)...);
+    }
+
+    void erase(T *b, T *e)
+    {
+        Q_ASSERT(this->isMutable());
+        Q_ASSERT(b < e);
+        Q_ASSERT(b >= this->begin() && b < this->end());
+        Q_ASSERT(e > this->begin() && e <= this->end());
+
+        // Qt5 QList in erase: try to move less data around
+        // Now:
+        const T *begin = this->begin();
+        const T *end = this->end();
+        if (b - begin < end - e) {
+            Base::erase(GrowsBackwardsTag{}, b, e);
+        } else {
+            Base::erase(GrowsForwardTag{}, b, e);
+        }
+    }
 };
 
 } // namespace QtPrivate
diff --git a/src/corelib/tools/qcontainertools_impl.h b/src/corelib/tools/qcontainertools_impl.h
index a7e1aa31cd..caac5a07e8 100644
--- a/src/corelib/tools/qcontainertools_impl.h
+++ b/src/corelib/tools/qcontainertools_impl.h
@@ -2,6 +2,7 @@
 **
 ** Copyright (C) 2018 Klarälvdalens Datakonsult AB, a KDAB Group company, info@kdab.com, author Marc Mutz
 ** Copyright (C) 2018 Klarälvdalens Datakonsult AB, a KDAB Group company, info@kdab.com, author Giuseppe D'Angelo
+** Copyright (C) 2020 The Qt Company Ltd.
 ** Contact: https://www.qt.io/licensing/
 **
 ** This file is part of the QtCore module of the Qt Toolkit.
@@ -147,6 +148,9 @@
 template <typename Iterator>
 using IfAssociativeIteratorHasFirstAndSecond =
     typename std::enable_if<AssociativeIteratorHasFirstAndSecond<Iterator>::value, bool>::type;
+template <typename T, typename U>
+using IfIsNotSame =
+    typename std::enable_if<!std::is_same<T, U>::value, bool>::type;
 
 } // namespace QtPrivate
 
 QT_END_NAMESPACE
diff --git a/tests/auto/corelib/tools/qarraydata/simplevector.h b/tests/auto/corelib/tools/qarraydata/simplevector.h
index 1e9812d08e..a6c43a92ee 100644
--- a/tests/auto/corelib/tools/qarraydata/simplevector.h
+++ b/tests/auto/corelib/tools/qarraydata/simplevector.h
@@ -206,8 +206,7 @@ public:
         if (d->needsDetach() || capacity() - size() < size_t(last - first)
                 || shouldGrow) {
-            // QTBUG-84320: change to GrowsBackwards once supported in operations
-            auto flags = d->detachFlags() | Data::GrowsForward;
+            const auto flags = d->detachFlags() | Data::GrowsBackwards;
             SimpleVector detached(DataPointer::allocateGrow(d, d->detachCapacity(newSize),
                                                             newSize, flags));
@@ -273,8 +272,11 @@ public:
         if (d->needsDetach() || capacity() - size() < size_t(last - first)
                 || shouldGrow) {
+            auto flags = d->detachFlags() | Data::GrowsForward;
+            if (size_t(position) <= (size() + (last - first)) / 4)
+                flags |= Data::GrowsBackwards;
             SimpleVector detached(DataPointer::allocateGrow(d, d->detachCapacity(newSize), newSize,
-                                                            d->detachFlags() | Data::GrowsForward));
+                                                            flags));
             if (position)
                 detached.d->copyAppend(begin, where);
diff --git a/tests/auto/corelib/tools/qarraydata/tst_qarraydata.cpp b/tests/auto/corelib/tools/qarraydata/tst_qarraydata.cpp
index c27147dfdb..2a1294a877 100644
--- a/tests/auto/corelib/tools/qarraydata/tst_qarraydata.cpp
+++ b/tests/auto/corelib/tools/qarraydata/tst_qarraydata.cpp
@@ -861,8 +861,11 @@ void tst_QArrayData::arrayOps()
         QVERIFY(vs[i].isSharedWith(referenceString));
 
         QCOMPARE(vo[i].id, referenceObject.id);
-        QCOMPARE(int(vo[i].flags), CountedObject::CopyConstructed
-                | CountedObject::DefaultConstructed);
+
+        // A temporary object is created as DefaultConstructed |
+        // CopyConstructed, then it is used instead of the original value to
+        // construct elements in the container which are CopyConstructed only
+        QCOMPARE(int(vo[i].flags), CountedObject::CopyConstructed);
     }
 
     ////////////////////////////////////////////////////////////////////////////
@@ -912,8 +915,12 @@ void tst_QArrayData::arrayOps()
         QVERIFY(vs[i].isSharedWith(stringArray[i % 5]));
 
         QCOMPARE(vo[i].id, objArray[i % 5].id);
+
+        // Insertion at begin (prepend) caused the elements to move, meaning
+        // that instead of being displaced, newly added elements got constructed
+        // in uninitialized memory with DefaultConstructed | CopyConstructed
         QCOMPARE(int(vo[i].flags), CountedObject::DefaultConstructed
-                | CountedObject::CopyAssigned);
+                | CountedObject::CopyConstructed);
     }
 
     for (int i = 5; i < 15; ++i) {
@@ -1083,8 +1090,12 @@ void tst_QArrayData::arrayOps2()
         QVERIFY(vs[i].isNull());
 
         QCOMPARE(vo[i].id, i);
+
+        // Erasing closer to begin causes smaller region [begin, begin + 2) to
+        // be moved. Thus, to-be-erased region gets reassigned with the elements
+        // at the beginning
        QCOMPARE(int(vo[i].flags), CountedObject::DefaultConstructed
-                | CountedObject::CopyConstructed);
+                | CountedObject::CopyConstructed | CountedObject::CopyAssigned);
     }
 
     for (size_t i = 2; i < 4; ++i) {
@@ -1092,8 +1103,9 @@ void tst_QArrayData::arrayOps2()
         QVERIFY(vs[i].isNull());
 
         QCOMPARE(vo[i].id, i + 8);
-        QCOMPARE(int(vo[i].flags), int(CountedObject::DefaultConstructed)
-                | CountedObject::CopyAssigned);
+
+        // Erasing closer to begin does not affect [begin + 2, begin + 4) region
+        QCOMPARE(int(vo[i].flags), CountedObject::DefaultConstructed);
     }
 
     for (size_t i = 4; i < 7; ++i) {
@@ -1101,8 +1113,9 @@ void tst_QArrayData::arrayOps2()
         QCOMPARE(vs[i], QString::number(i + 3));
 
         QCOMPARE(vo[i].id, i + 3);
-        QCOMPARE(int(vo[i].flags), CountedObject::DefaultConstructed
-                | CountedObject::CopyAssigned);
+
+        // Erasing closer to begin does not affect [begin + 2, begin + 4) region
+        QCOMPARE(int(vo[i].flags), CountedObject::DefaultConstructed);
     }
 }
 
@@ -1134,15 +1147,18 @@ void tst_QArrayData::arrayOpsExtra()
     const auto setupDataPointers = [&allocationOptions] (size_t capacity, size_t initialSize = 0) {
         const qsizetype alloc = qsizetype(capacity);
-        // QTBUG-84320: change to growing function once supported
-        QArrayDataPointer<int> i(QTypedArrayData<int>::allocate(alloc, allocationOptions));
-        QArrayDataPointer<QString> s(QTypedArrayData<QString>::allocate(alloc, allocationOptions));
-        QArrayDataPointer<CountedObject> o(QTypedArrayData<CountedObject>::allocate(alloc, allocationOptions));
+        auto i = QArrayDataPointer<int>::allocateGrow(QArrayDataPointer<int>(), alloc,
+                                                      initialSize, allocationOptions);
+        auto s = QArrayDataPointer<QString>::allocateGrow(QArrayDataPointer<QString>(), alloc,
+                                                          initialSize, allocationOptions);
+        auto o = QArrayDataPointer<CountedObject>::allocateGrow(QArrayDataPointer<CountedObject>(), alloc,
+                                                                initialSize, allocationOptions);
         if (initialSize) {
             i->appendInitialize(initialSize);
             s->appendInitialize(initialSize);
             o->appendInitialize(initialSize);
         }
+
         // assign unique values
         std::generate(i.begin(), i.end(), [] () { static int i = 0; return i++; });
         std::generate(s.begin(), s.end(), [] () { static int i = 0; return QString::number(i++); });
@@ -1209,11 +1225,6 @@ void tst_QArrayData::arrayOpsExtra()
         RUN_TEST_FUNC(testCopyAppend, strData, stringArray.begin(), stringArray.end());
         RUN_TEST_FUNC(testCopyAppend, objData, objArray.begin(), objArray.end());
 
-        // from self
-        RUN_TEST_FUNC(testCopyAppend, intData, intData.begin() + 3, intData.begin() + 5);
-        RUN_TEST_FUNC(testCopyAppend, strData, strData.begin() + 3, strData.begin() + 5);
-        RUN_TEST_FUNC(testCopyAppend, objData, objData.begin() + 3, objData.begin() + 5);
-
         // append to full
         const size_t intDataFreeSpace = intData.constAllocatedCapacity() - intData.size;
         QVERIFY(intDataFreeSpace > 0);
@@ -1232,6 +1243,59 @@ void tst_QArrayData::arrayOpsExtra()
         QCOMPARE(size_t(objData.size), objData.constAllocatedCapacity());
     }
 
+    // copyAppend (iterator version) - special case of copying from self iterators
+    {
+        CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
+        const auto testCopyAppendSelf = [&] (auto &dataPointer, auto first, auto last) {
+            const size_t originalSize = dataPointer.size;
+            auto copy = cloneArrayDataPointer(dataPointer, dataPointer.size);
+            const size_t distance = std::distance(first, last);
+            auto firstCopy = copy->begin() + std::distance(dataPointer->begin(), first);
+
+            dataPointer->copyAppend(first, last);
+            QCOMPARE(size_t(dataPointer.size), originalSize + distance);
+            size_t i = 0;
+            for (; i < originalSize; ++i)
+                QCOMPARE(dataPointer.data()[i], copy.data()[i]);
+            for (; i < size_t(dataPointer.size); ++i)
+                QCOMPARE(dataPointer.data()[i], *(firstCopy + (i - originalSize)));
+        };
+
+        auto [intData, strData, objData] = setupDataPointers(inputSize * 2, inputSize / 2);
+        // make no free space at the end
+        intData->appendInitialize(intData.size + intData.freeSpaceAtEnd());
+        strData->appendInitialize(strData.size + strData.freeSpaceAtEnd());
+        objData->appendInitialize(objData.size + objData.freeSpaceAtEnd());
+
+        // make all values unique. this would ensure that we do not have erroneously passed test
+        int i = 0;
+        std::generate(intData.begin(), intData.end(), [&i] () { return i++; });
+        std::generate(strData.begin(), strData.end(), [&i] () { return QString::number(i++); });
+        std::generate(objData.begin(), objData.end(), [] () { return CountedObject(); });
+
+        // sanity checks:
+        if (allocationOptions & QArrayData::GrowsBackwards) {
+            QVERIFY(intData.freeSpaceAtBegin() > 0);
+            QVERIFY(strData.freeSpaceAtBegin() > 0);
+            QVERIFY(objData.freeSpaceAtBegin() > 0);
+        }
+        QVERIFY(intData.freeSpaceAtBegin() <= intData.size);
+        QVERIFY(strData.freeSpaceAtBegin() <= strData.size);
+        QVERIFY(objData.freeSpaceAtBegin() <= objData.size);
+        QVERIFY(intData.freeSpaceAtEnd() == 0);
+        QVERIFY(strData.freeSpaceAtEnd() == 0);
+        QVERIFY(objData.freeSpaceAtEnd() == 0);
+
+        // now, append to full size causing the data to move internally. passed
+        // iterators that refer to the object itself must be used correctly
+        RUN_TEST_FUNC(testCopyAppendSelf, intData, intData.begin(),
+                      intData.begin() + intData.freeSpaceAtBegin());
+        RUN_TEST_FUNC(testCopyAppendSelf, strData, strData.begin(),
+                      strData.begin() + strData.freeSpaceAtBegin());
+        RUN_TEST_FUNC(testCopyAppendSelf, objData, objData.begin(),
+                      objData.begin() + objData.freeSpaceAtBegin());
+    }
+
     // copyAppend (value version)
     {
         CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
@@ -1279,6 +1343,52 @@ void tst_QArrayData::arrayOpsExtra()
         QCOMPARE(size_t(objData.size), objData.constAllocatedCapacity());
     }
 
+    // copyAppend (value version) - special case of copying self value
+    {
+        CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
+        const auto testCopyAppendSelf = [&] (auto &dataPointer, size_t n, const auto &value) {
+            const size_t originalSize = dataPointer.size;
+            auto copy = cloneArrayDataPointer(dataPointer, dataPointer.size);
+            auto valueCopy = value;
+
+            dataPointer->copyAppend(n, value);
+            QCOMPARE(size_t(dataPointer.size), originalSize + n);
+            size_t i = 0;
+            for (; i < originalSize; ++i)
+                QCOMPARE(dataPointer.data()[i], copy.data()[i]);
+            for (; i < size_t(dataPointer.size); ++i)
+                QCOMPARE(dataPointer.data()[i], valueCopy);
+        };
+
+        auto [intData, strData, objData] = setupDataPointers(inputSize * 2, inputSize / 2);
+        // make no free space at the end
+        intData->appendInitialize(intData.size + intData.freeSpaceAtEnd());
+        strData->appendInitialize(strData.size + strData.freeSpaceAtEnd());
+        objData->appendInitialize(objData.size + objData.freeSpaceAtEnd());
+
+        // make all values unique. this would ensure that we do not have erroneously passed test
+        int i = 0;
+        std::generate(intData.begin(), intData.end(), [&i] () { return i++; });
+        std::generate(strData.begin(), strData.end(), [&i] () { return QString::number(i++); });
+        std::generate(objData.begin(), objData.end(), [] () { return CountedObject(); });
+
+        // sanity checks:
+        if (allocationOptions & QArrayData::GrowsBackwards) {
+            QVERIFY(intData.freeSpaceAtBegin() > 0);
+            QVERIFY(strData.freeSpaceAtBegin() > 0);
+            QVERIFY(objData.freeSpaceAtBegin() > 0);
+        }
+        QVERIFY(intData.freeSpaceAtEnd() == 0);
+        QVERIFY(strData.freeSpaceAtEnd() == 0);
+        QVERIFY(objData.freeSpaceAtEnd() == 0);
+
+        // now, append to full size causing the data to move internally. passed
+        // value that refers to the object itself must be used correctly
+        RUN_TEST_FUNC(testCopyAppendSelf, intData, intData.freeSpaceAtBegin(), intData.data()[0]);
+        RUN_TEST_FUNC(testCopyAppendSelf, strData, strData.freeSpaceAtBegin(), strData.data()[0]);
+        RUN_TEST_FUNC(testCopyAppendSelf, objData, objData.freeSpaceAtBegin(), objData.data()[0]);
+    }
+
     // moveAppend
     {
         CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
@@ -1327,6 +1437,63 @@ void tst_QArrayData::arrayOpsExtra()
         QCOMPARE(size_t(objData.size), objData.constAllocatedCapacity());
     }
 
+    // moveAppend - special case of moving from self (this is legal yet rather useless)
+    {
+        CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
+        const auto testMoveAppendSelf = [&] (auto &dataPointer, auto first, auto last) {
+            const size_t originalSize = dataPointer.size;
+            auto copy = cloneArrayDataPointer(dataPointer, dataPointer.size);
+            const size_t addedSize = std::distance(first, last);
+            const size_t firstPos = std::distance(dataPointer->begin(), first);
+            auto firstCopy = copy->begin() + firstPos;
+
+            dataPointer->moveAppend(first, last);
+            QCOMPARE(size_t(dataPointer.size), originalSize + addedSize);
+            size_t i = 0;
+            for (; i < originalSize; ++i) {
+                if (i >= firstPos && i < (firstPos + addedSize))  // skip "moved from" chunk
+                    continue;
+                QCOMPARE(dataPointer.data()[i], copy.data()[i]);
+            }
+            for (; i < size_t(dataPointer.size); ++i)
+                QCOMPARE(dataPointer.data()[i], *(firstCopy + (i - originalSize)));
+        };
+
+        auto [intData, strData, objData] = setupDataPointers(inputSize * 2, inputSize / 2);
+        // make no free space at the end
+        intData->appendInitialize(intData.size + intData.freeSpaceAtEnd());
+        strData->appendInitialize(strData.size + strData.freeSpaceAtEnd());
+        objData->appendInitialize(objData.size + objData.freeSpaceAtEnd());
+
+        // make all values unique. this would ensure that we do not have erroneously passed test
+        int i = 0;
+        std::generate(intData.begin(), intData.end(), [&i] () { return i++; });
+        std::generate(strData.begin(), strData.end(), [&i] () { return QString::number(i++); });
+        std::generate(objData.begin(), objData.end(), [] () { return CountedObject(); });
+
+        // sanity checks:
+        if (allocationOptions & QArrayData::GrowsBackwards) {
+            QVERIFY(intData.freeSpaceAtBegin() > 0);
+            QVERIFY(strData.freeSpaceAtBegin() > 0);
+            QVERIFY(objData.freeSpaceAtBegin() > 0);
+        }
+        QVERIFY(intData.freeSpaceAtBegin() <= intData.size);
+        QVERIFY(strData.freeSpaceAtBegin() <= strData.size);
+        QVERIFY(objData.freeSpaceAtBegin() <= objData.size);
+        QVERIFY(intData.freeSpaceAtEnd() == 0);
+        QVERIFY(strData.freeSpaceAtEnd() == 0);
+        QVERIFY(objData.freeSpaceAtEnd() == 0);
+
+        // now, append to full size causing the data to move internally. passed
+        // iterators that refer to the object itself must be used correctly
+        RUN_TEST_FUNC(testMoveAppendSelf, intData, intData.begin(),
+                      intData.begin() + intData.freeSpaceAtBegin());
+        RUN_TEST_FUNC(testMoveAppendSelf, strData, strData.begin(),
+                      strData.begin() + strData.freeSpaceAtBegin());
+        RUN_TEST_FUNC(testMoveAppendSelf, objData, objData.begin(),
+                      objData.begin() + objData.freeSpaceAtBegin());
+    }
+
     // truncate
     {
         CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
@@ -1434,6 +1601,55 @@ void tst_QArrayData::arrayOpsExtra()
         RUN_TEST_FUNC(testInsertValue, objData, objData.size, 1, CountedObject());
     }
 
+    // insert - special case of inserting from self value. this test only makes
+    // sense for prepend - insert at begin.
+    {
+        const auto testInsertValueSelf = [&] (auto &dataPointer, size_t n, const auto &value) {
+            const size_t originalSize = dataPointer.size;
+            auto copy = cloneArrayDataPointer(dataPointer, dataPointer.size);
+            auto valueCopy = value;
+
+            dataPointer->insert(dataPointer.begin(), n, value);
+            QCOMPARE(size_t(dataPointer.size), originalSize + n);
+            size_t i = 0;
+            for (; i < n; ++i)
+                QCOMPARE(dataPointer.data()[i], valueCopy);
+            for (; i < size_t(dataPointer.size); ++i)
+                QCOMPARE(dataPointer.data()[i], copy.data()[i - n]);
+        };
+
+        CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
+        auto [intData, strData, objData] = setupDataPointers(inputSize * 2, inputSize / 2);
+
+        // make no free space at the begin
+        intData->insert(intData.begin(), intData.freeSpaceAtBegin(), intData.data()[0]);
+        strData->insert(strData.begin(), strData.freeSpaceAtBegin(), strData.data()[0]);
+        objData->insert(objData.begin(), objData.freeSpaceAtBegin(), objData.data()[0]);
+
+        // make all values unique. this would ensure that we do not have erroneously passed test
+        int i = 0;
+        std::generate(intData.begin(), intData.end(), [&i] () { return i++; });
+        std::generate(strData.begin(), strData.end(), [&i] () { return QString::number(i++); });
+        std::generate(objData.begin(), objData.end(), [] () { return CountedObject(); });
+
+        // sanity checks:
+        QVERIFY(intData.freeSpaceAtEnd() > 0);
+        QVERIFY(strData.freeSpaceAtEnd() > 0);
+        QVERIFY(objData.freeSpaceAtEnd() > 0);
+        QVERIFY(intData.freeSpaceAtBegin() == 0);
+        QVERIFY(strData.freeSpaceAtBegin() == 0);
+        QVERIFY(objData.freeSpaceAtBegin() == 0);
+
+        // now, prepend to full size causing the data to move internally. passed
+        // value that refers to the object itself must be used correctly
+        RUN_TEST_FUNC(testInsertValueSelf, intData, intData.freeSpaceAtEnd(),
+                      intData.data()[intData.size - 1]);
+        RUN_TEST_FUNC(testInsertValueSelf, strData, strData.freeSpaceAtEnd(),
+                      strData.data()[strData.size - 1]);
+        RUN_TEST_FUNC(testInsertValueSelf, objData, objData.freeSpaceAtEnd(),
+                      objData.data()[objData.size - 1]);
+    }
+
     // emplace
     {
         CountedObject::LeakChecker localLeakChecker; Q_UNUSED(localLeakChecker);
@@ -2429,6 +2645,29 @@ void tst_QArrayData::exceptionSafetyPrimitives_destructor()
             QVERIFY(throwingTypeWatcher().destroyedIds[0] == 42);
         }
     }
+
+    // extra: special case of freezing the position
+    {
+        auto data = createDataPointer(20, 10);
+        auto reference = createDataPointer(20, 10);
+        reference->erase(reference.end() - 1, reference.end());
+        data.data()[data.size - 1] = ThrowingType(42);
+
+        WatcherScope scope; Q_UNUSED(scope);
+        {
+            auto where = data.end();
+            Destructor destroyer(where);
+            for (int i = 0; i < 3; ++i) {
+                --where;
+                destroyer.freeze();
+            }
+        }
+        --data.size;  // destroyed 1 element above
+        for (qsizetype i = 0; i < data.size; ++i)
+            QCOMPARE(data.data()[i], reference.data()[i]);
+        QVERIFY(throwingTypeWatcher().destroyedIds.size() == 1);
+        QCOMPARE(throwingTypeWatcher().destroyedIds[0], 42);
+    }
 }
 
 void tst_QArrayData::exceptionSafetyPrimitives_mover()