git.blender.org/blender.git
 source/blender/blenlib/BLI_chunk_list.hh            | 527
 source/blender/blenlib/BLI_stack.hh                  | 250
 source/blender/blenlib/CMakeLists.txt                |   2
 source/blender/blenlib/tests/BLI_chunk_list_test.cc  |  68
 source/blender/blenlib/tests/BLI_stack_cxx_test.cc   |   4
 5 files changed, 608 insertions(+), 243 deletions(-)
diff --git a/source/blender/blenlib/BLI_chunk_list.hh b/source/blender/blenlib/BLI_chunk_list.hh
new file mode 100644
index 00000000000..963e7af90e8
--- /dev/null
+++ b/source/blender/blenlib/BLI_chunk_list.hh
@@ -0,0 +1,527 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+
+#pragma once
+
+/** \file
+ * \ingroup bli
+ *
+ * A `blender::ChunkList` is a dynamically growing ordered container for values of type T.
+ * It is *not* guaranteed that all values will be stored in one contiguous array. Instead, multiple
+ * arrays may be used.
+ *
+ * Comparison to `blender::Vector`:
+ * - `ChunkList` has better performance when appending many elements, because it does not have to
+ * move existing values.
+ * - This also means that `ChunkList` can be used with types that cannot be moved.
+ * - A `ChunkList` cannot be indexed efficiently. So while the container is ordered, one cannot
+ *   efficiently access the value at a specific index.
+ * - Iterating over a `ChunkList` is a little bit slower, because it may have to iterate over
+ * multiple arrays. That is likely negligible in most cases.
+ *
+ * `ChunkList` should be used instead of `Vector` when the following two statements are true:
+ * - The elements do not have to be in a contiguous array.
+ * - The elements do not have to be accessed with an index.
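+ *
+ * A minimal usage sketch (illustrative only, based on the interface declared below):
+ *
+ *   ChunkList<int> list;
+ *   list.append(1);
+ *   list.append(2);
+ *   list.extend(Span<int>({3, 4, 5}));
+ *   int sum = 0;
+ *   for (const int value : list) {
+ *     sum += value;
+ *   }
+ *   const int last_value = list.pop_last();
+ *
+ * After this, `sum` is 15 and `last_value` is 5; the remaining elements stay in insertion order.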
+ */
+
+#include "BLI_allocator.hh"
+#include "BLI_math_base.hh"
+#include "BLI_memory_utils.hh"
+#include "BLI_vector.hh"
+
+namespace blender {
+
+namespace chunk_list_detail {
+
+template<typename T> struct RawChunk {
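+  /**
+   * `begin` points at the first element and `capacity_end` at the end of the allocated buffer.
+   * `end_if_inactive` is only kept up to date while the chunk is not the active one; the end of
+   * the active chunk is tracked by #ChunkList::active_end_ instead.
+   */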
+ T *begin;
+ T *end_if_inactive;
+ T *capacity_end;
+ /**
+ * Pointer to the beginning of the allocation of this chunk. Might not be the same as `begin`,
+   * because the chunk might be allocated together with #AllocInfo. Can be null when this chunk is
+ * inlined into the #ChunkList.
+ */
+ void *allocation;
+
+ RawChunk() = default;
+ RawChunk(T *begin, T *end_if_inactive, T *capacity_end, void *allocation)
+ : begin(begin),
+ end_if_inactive(end_if_inactive),
+ capacity_end(capacity_end),
+ allocation(allocation)
+ {
+ }
+};
+
+template<typename T> struct AllocInfo {
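+  /** Index of the chunk that new elements are currently appended to. */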
+ int64_t active;
+ Vector<RawChunk<T>> raw_chunks;
+};
+
+} // namespace chunk_list_detail
+
+template<typename T,
+ int64_t InlineBufferCapacity = default_inline_buffer_capacity(sizeof(T)),
+ typename Allocator = GuardedAllocator>
+class ChunkList {
+ private:
+ using RawChunk = chunk_list_detail::RawChunk<T>;
+ using AllocInfo = chunk_list_detail::AllocInfo<T>;
+
+ T *active_begin_;
+ T *active_end_;
+ T *active_capacity_end_;
+ AllocInfo *alloc_info_ = nullptr;
+ BLI_NO_UNIQUE_ADDRESS TypedBuffer<T, InlineBufferCapacity> inline_buffer_;
+ BLI_NO_UNIQUE_ADDRESS Allocator allocator_;
+
+ template<typename OtherT, int64_t OtherInlineBufferCapacity, typename OtherAllocator>
+ friend class ChunkList;
+
+ public:
+ ChunkList(Allocator allocator = {}) noexcept : allocator_(allocator)
+ {
+ active_begin_ = inline_buffer_;
+ active_end_ = active_begin_;
+ active_capacity_end_ = active_end_ + InlineBufferCapacity;
+ }
+
+ ChunkList(NoExceptConstructor, Allocator allocator = {}) noexcept : ChunkList(allocator)
+ {
+ }
+
+ ChunkList(const ChunkList &other) : ChunkList(other.allocator())
+ {
+ this->extend(other);
+ }
+
+ template<int64_t OtherInlineBufferCapacity>
+ ChunkList(const ChunkList<T, OtherInlineBufferCapacity, Allocator> &other)
+ : ChunkList(other.allocator())
+ {
+ this->extend(other);
+ }
+
+ ChunkList(ChunkList &&other) : ChunkList(other.allocator())
+ {
+ this->extend(std::move(other));
+ }
+
+ ~ChunkList()
+ {
+ if (alloc_info_ == nullptr) {
+ destruct_n<T>(inline_buffer_, active_end_ - inline_buffer_);
+ }
+ else {
+ for (const int64_t i : alloc_info_->raw_chunks.index_range()) {
+ RawChunk &raw_chunk = alloc_info_->raw_chunks[i];
+ T *begin = raw_chunk.begin;
+ T *end = i == alloc_info_->active ? active_end_ : raw_chunk.end_if_inactive;
+ destruct_n(begin, end - begin);
+ if (i >= 1) {
+ allocator_.deallocate(begin);
+ }
+ }
+ alloc_info_->~AllocInfo();
+ allocator_.deallocate(alloc_info_);
+ }
+ }
+
+ ChunkList &operator=(const ChunkList &other)
+ {
+ return copy_assign_container(*this, other);
+ }
+
+ ChunkList &operator=(ChunkList &&other)
+ {
+ return move_assign_container(*this, std::move(other));
+ }
+
+ void clear()
+ {
+ this->~ChunkList();
+ new (this) ChunkList();
+ }
+
+ const Allocator &allocator() const
+ {
+ return allocator_;
+ }
+
+ template<typename Fn> void foreach_chunk(Fn &&fn) const
+ {
+ for (const int64_t i : IndexRange(this->get_chunk_num())) {
+ const Span<T> chunk = this->get_chunk(i);
+ fn(chunk);
+ }
+ }
+
+ template<typename Fn> void foreach_chunk(Fn &&fn)
+ {
+ const_cast<const ChunkList *>(this)->foreach_chunk([&](const Span<T> chunk) {
+ fn(MutableSpan(const_cast<T *>(chunk.data()), chunk.size()));
+ });
+ }
+
+ template<typename Fn> void foreach_elem(Fn &&fn) const
+ {
+ for (const int64_t i : IndexRange(this->get_chunk_num())) {
+ const Span<T> chunk = this->get_chunk(i);
+ for (const T &value : chunk) {
+ fn(value);
+ }
+ }
+ }
+
+ bool is_empty() const
+ {
+ return active_end_ == inline_buffer_;
+ }
+
+ int64_t size() const
+ {
+ int64_t chunk_size_sum = 0;
+ this->foreach_chunk([&](const Span<T> chunk) { chunk_size_sum += chunk.size(); });
+ return chunk_size_sum;
+ }
+
+ int64_t get_chunk_num() const
+ {
+ if (alloc_info_ == nullptr) {
+ return 1;
+ }
+ return alloc_info_->active + 1;
+ }
+
+ BLI_NOINLINE Span<T> get_chunk(const int64_t index) const
+ {
+ BLI_assert(index >= 0);
+ if (alloc_info_ == nullptr) {
+ BLI_assert(index == 0);
+ return {inline_buffer_, active_end_ - inline_buffer_};
+ }
+ BLI_assert(index <= alloc_info_->active);
+ const RawChunk &chunk = alloc_info_->raw_chunks[index];
+ const T *begin = chunk.begin;
+ const T *end = index == alloc_info_->active ? active_end_ : chunk.end_if_inactive;
+ return {begin, end - begin};
+ }
+
+ MutableSpan<T> get_chunk(const int64_t index)
+ {
+ const Span<T> chunk = const_cast<const ChunkList *>(this)->get_chunk(index);
+ return {const_cast<T *>(chunk.data()), chunk.size()};
+ }
+
+ T &last()
+ {
+ BLI_assert(!this->is_empty());
+ return *(active_end_ - 1);
+ }
+ const T &last() const
+ {
+ BLI_assert(!this->is_empty());
+ return *(active_end_ - 1);
+ }
+
+ void append(const T &value)
+ {
+ this->append_as(value);
+ }
+ void append(T &&value)
+ {
+ this->append_as(std::move(value));
+ }
+ template<typename... Args> void append_as(Args &&...args)
+ {
+ this->ensure_space_for_one();
+ BLI_assert(active_end_ < active_capacity_end_);
+ try {
+ new (active_end_) T(std::forward<Args>(args)...);
+ }
+ catch (...) {
+ this->move_end_back_to_prev_element();
+ throw;
+ }
+ active_end_++;
+ }
+
+ template<int64_t OtherInlineBufferCapacity>
+ void extend(const ChunkList<T, OtherInlineBufferCapacity, Allocator> &other)
+ {
+ other.foreach_chunk([&](const Span<T> chunk) { this->extend(chunk); });
+ }
+
+ template<int64_t OtherInlineBufferCapacity>
+ void extend(ChunkList<T, OtherInlineBufferCapacity, Allocator> &&other)
+ {
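+    /* Steal the heap-allocated chunks from `other` where possible. Values that live in an inline
+     * buffer cannot be stolen, because they are stored inside the other list itself; they are
+     * moved over element-wise instead. */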
+ AllocInfo *other_alloc_info = other.alloc_info_;
+
+ if (other_alloc_info == nullptr) {
+ /* Handle case when the other list is fully inline. */
+ this->extend_move({other.active_begin_, other.active_end_ - other.active_begin_});
+ }
+ else {
+ /* Make sure all chunks are up to date. */
+ RawChunk &other_active_chunk = other_alloc_info->raw_chunks[other_alloc_info->active];
+ other_active_chunk.end_if_inactive = other.active_end_;
+ for (const int64_t other_chunk_index : other_alloc_info->raw_chunks.index_range()) {
+ const RawChunk &other_chunk = other_alloc_info->raw_chunks[other_chunk_index];
+ if (other_chunk.allocation == nullptr) {
+ /* This chunk is inline. */
+ this->extend_move({other_chunk.begin, other_chunk.end_if_inactive - other_chunk.begin});
+ }
+ else if (alloc_info_ == nullptr) {
+ alloc_info_ = other_alloc_info;
+ other.alloc_info_ = nullptr;
+ RawChunk &first_chunk = alloc_info_->raw_chunks[0];
+ BLI_assert(first_chunk.allocation == nullptr);
+ first_chunk.begin = active_begin_;
+ first_chunk.capacity_end = active_capacity_end_;
+ first_chunk.end_if_inactive = active_end_;
+ break;
+ }
+ else {
+ alloc_info_->raw_chunks.append(other_chunk);
+ if (other_chunk.begin < other_chunk.end_if_inactive) {
+ alloc_info_->active = alloc_info_->raw_chunks.size() - 1;
+ }
+ }
+ }
+
+ if (alloc_info_ != nullptr) {
+ const RawChunk &active_chunk = alloc_info_->raw_chunks[alloc_info_->active];
+ active_begin_ = active_chunk.begin;
+ active_end_ = active_chunk.end_if_inactive;
+ active_capacity_end_ = active_chunk.capacity_end;
+ }
+ if (other.alloc_info_ != nullptr) {
+ other.alloc_info_->raw_chunks.resize(1);
+ }
+ }
+
+ /* Reset the other list. */
+ other.active_begin_ = other.inline_buffer_;
+ other.active_end_ = other.active_begin_;
+ other.active_capacity_end_ = other.active_begin_ + OtherInlineBufferCapacity;
+ }
+
+ void extend_move(const MutableSpan<T> values)
+ {
+ this->extend_impl<true>(values);
+ }
+
+ void extend(const Span<T> values)
+ {
+ this->extend_impl<false>(values);
+ }
+
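+  /**
+   * Copy (or move, when `UseMove` is true) the given values to the end of the list, filling up
+   * the active chunk and activating new chunks as needed.
+   */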
+ template<bool UseMove> void extend_impl(const Span<T> values)
+ {
+ const T *src_begin = values.data();
+ const T *src_end = src_begin + values.size();
+ const T *src = src_begin;
+ while (src < src_end) {
+ const int64_t remaining_copies = src_end - src;
+ const int64_t remaining_capacity = active_capacity_end_ - active_end_;
+ const int64_t copy_num = std::min(remaining_copies, remaining_capacity);
+ try {
+ if constexpr (UseMove) {
+ uninitialized_move_n(const_cast<T *>(src), copy_num, active_end_);
+ }
+ else {
+ uninitialized_copy_n(src, copy_num, active_end_);
+ }
+ }
+ catch (...) {
+ if (alloc_info_ != nullptr) {
+ int64_t remaining_destructs = src - src_begin;
+ while (remaining_destructs > 0) {
+ this->move_end_back_to_prev_element();
+ const int64_t chunk_size = active_end_ - active_begin_;
+ const int64_t destruct_num = std::min(chunk_size, remaining_destructs);
+ destruct_n(active_end_ - destruct_num, destruct_num);
+ active_end_ -= destruct_num;
+ remaining_destructs -= destruct_num;
+ }
+ }
+ throw;
+ }
+ active_end_ += copy_num;
+
+ if (copy_num == remaining_copies) {
+ break;
+ }
+ src += copy_num;
+ this->activate_next_chunk();
+ }
+ }
+
+ T pop_last()
+ {
+ BLI_assert(!this->is_empty());
+ T value = std::move(*(active_end_ - 1));
+ active_end_--;
+ std::destroy_at(active_end_);
+ this->move_end_back_to_prev_element();
+ return value;
+ }
+
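+  /**
+   * If the active chunk has become empty, make the closest preceding non-empty chunk (or the
+   * first chunk) active again. This keeps #last and #pop_last working after elements have been
+   * removed or after a failed append.
+   */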
+ void move_end_back_to_prev_element()
+ {
+ if (active_end_ > active_begin_) {
+ return;
+ }
+ if (alloc_info_ == nullptr) {
+ return;
+ }
+ if (alloc_info_->active == 0) {
+ return;
+ }
+ RawChunk &old_chunk = alloc_info_->raw_chunks[alloc_info_->active];
+ old_chunk.end_if_inactive = active_end_;
+ int new_active = alloc_info_->active - 1;
+    while (new_active > 0) {
+ RawChunk &chunk = alloc_info_->raw_chunks[new_active];
+ if (chunk.begin < chunk.end_if_inactive) {
+ break;
+ }
+ new_active--;
+ }
+ RawChunk &new_chunk = alloc_info_->raw_chunks[new_active];
+ alloc_info_->active = new_active;
+ active_begin_ = new_chunk.begin;
+ active_end_ = new_chunk.end_if_inactive;
+ active_capacity_end_ = new_chunk.capacity_end;
+ }
+
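+  /**
+   * A forward iterator over all elements of the list. When the end of a chunk is reached, the
+   * iterator jumps to the beginning of the next chunk.
+   */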
+ class Iterator {
+ private:
+ using iterator_category = std::forward_iterator_tag;
+ using value_type = T;
+ using pointer = const T *;
+ using reference = const T &;
+
+ const T *begin_;
+ const T *current_;
+ const T *end_;
+ int64_t chunk_index_;
+ int64_t chunk_num_;
+ const ChunkList *chunk_list_;
+
+ friend ChunkList;
+
+ public:
+ constexpr Iterator &operator++()
+ {
+ BLI_assert(chunk_index_ < chunk_num_);
+ current_++;
+ if (current_ == end_) {
+ chunk_index_++;
+ if (chunk_index_ < chunk_num_) {
+ const Span<T> next_chunk = chunk_list_->get_chunk(chunk_index_);
+ begin_ = next_chunk.data();
+ current_ = begin_;
+ end_ = begin_ + next_chunk.size();
+ }
+ }
+ return *this;
+ }
+
+ constexpr friend bool operator!=(const Iterator &a, const Iterator &b)
+ {
+ BLI_assert(a.chunk_list_ == b.chunk_list_);
+ return a.current_ != b.current_;
+ }
+
+ constexpr const T &operator*() const
+ {
+ return *current_;
+ }
+ };
+
+ Iterator begin() const
+ {
+ const int64_t span_num = this->get_chunk_num();
+ BLI_assert(span_num >= 1);
+ const Span<T> span = this->get_chunk(0);
+ Iterator it;
+ it.chunk_list_ = this;
+ it.chunk_index_ = 0;
+ it.chunk_num_ = span_num;
+ it.begin_ = span.data();
+ it.end_ = it.begin_ + span.size();
+ it.current_ = it.begin_;
+ return it;
+ }
+
+ Iterator end() const
+ {
+ const int64_t span_num = this->get_chunk_num();
+ BLI_assert(span_num >= 1);
+ const Span<T> last_span = this->get_chunk(span_num - 1);
+
+ Iterator it;
+ it.chunk_list_ = this;
+ it.chunk_index_ = span_num;
+ it.chunk_num_ = span_num;
+ it.begin_ = last_span.data();
+ it.end_ = it.begin_ + last_span.size();
+ it.current_ = it.end_;
+ return it;
+ }
+
+ private:
+ void ensure_space_for_one()
+ {
+ if (active_end_ >= active_capacity_end_) {
+ this->activate_next_chunk();
+ }
+ }
+
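+  /**
+   * Make the next chunk the active one, allocating a new chunk first when none is available. The
+   * end of the previously active chunk is stored in its #RawChunk so that it can be restored
+   * later.
+   */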
+ void activate_next_chunk()
+ {
+ if (alloc_info_ == nullptr) {
+ this->prepare_alloc_info();
+ }
+
+ RawChunk &old_active_chunk = alloc_info_->raw_chunks[alloc_info_->active];
+ old_active_chunk.end_if_inactive = active_end_;
+ BLI_assert(old_active_chunk.capacity_end == active_capacity_end_);
+
+ alloc_info_->active++;
+ if (alloc_info_->active == alloc_info_->raw_chunks.size()) {
+ this->add_chunk(1);
+ }
+
+ RawChunk &new_active_chunk = alloc_info_->raw_chunks[alloc_info_->active];
+ active_begin_ = new_active_chunk.begin;
+ active_end_ = new_active_chunk.end_if_inactive;
+ active_capacity_end_ = new_active_chunk.capacity_end;
+ }
+
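+  /**
+   * Lazily create the #AllocInfo and register the inline buffer as the first chunk in it.
+   */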
+ BLI_NOINLINE void prepare_alloc_info()
+ {
+ BLI_assert(alloc_info_ == nullptr);
+ void *buffer = allocator_.allocate(sizeof(AllocInfo), alignof(AllocInfo), __func__);
+ alloc_info_ = new (buffer) AllocInfo();
+ alloc_info_->raw_chunks.append_as(inline_buffer_, active_end_, active_capacity_end_, nullptr);
+ alloc_info_->active = 0;
+ }
+
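+  /**
+   * Allocate a new chunk that can hold at least `min_chunk_size` elements. Chunk sizes grow
+   * geometrically (doubling), but the growth is capped at 4096 elements.
+   */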
+ BLI_NOINLINE void add_chunk(const int64_t min_chunk_size)
+ {
+ const RawChunk &last_chunk = alloc_info_->raw_chunks.last();
+ const int64_t last_chunk_size = last_chunk.capacity_end - last_chunk.begin;
+ const int64_t new_chunk_size = std::max<int64_t>(min_chunk_size,
+ std::min<int64_t>(last_chunk_size * 2, 4096));
+ void *buffer = allocator_.allocate(
+ sizeof(T) * static_cast<size_t>(new_chunk_size), alignof(T), __func__);
+ T *new_chunk_begin = static_cast<T *>(buffer);
+ T *new_chunk_capacity_end = new_chunk_begin + new_chunk_size;
+ alloc_info_->raw_chunks.append_as(
+ new_chunk_begin, new_chunk_begin, new_chunk_capacity_end, buffer);
+ }
+};
+
+} // namespace blender
diff --git a/source/blender/blenlib/BLI_stack.hh b/source/blender/blenlib/BLI_stack.hh
index ed123f43a6b..67e266a3956 100644
--- a/source/blender/blenlib/BLI_stack.hh
+++ b/source/blender/blenlib/BLI_stack.hh
@@ -26,31 +26,12 @@
*/
#include "BLI_allocator.hh"
+#include "BLI_chunk_list.hh"
#include "BLI_memory_utils.hh"
#include "BLI_span.hh"
namespace blender {
-/**
- * A StackChunk references a contiguous memory buffer. Multiple StackChunk instances are linked in
- * a double linked list.
- */
-template<typename T> struct StackChunk {
- /** The below chunk contains the elements that have been pushed on the stack before. */
- StackChunk *below;
- /** The above chunk contains the elements that have been pushed on the stack afterwards. */
- StackChunk *above;
- /** Pointer to the first element of the referenced buffer. */
- T *begin;
- /** Pointer to one element past the end of the referenced buffer. */
- T *capacity_end;
-
- int64_t capacity() const
- {
- return capacity_end - begin;
- }
-};
-
template<
/** Type of the elements that are stored in the stack. */
typename T,
@@ -75,52 +56,14 @@ class Stack {
using size_type = int64_t;
private:
- using Chunk = StackChunk<T>;
-
- /**
- * Points to one element after top-most value in the stack.
- *
- * Invariant:
- * If size_ == 0
- * then: top_ == inline_chunk_.begin
- * else: &peek() == top_ - 1;
- */
- T *top_;
-
- /** Points to the chunk that references the memory pointed to by top_. */
- Chunk *top_chunk_;
-
- /**
- * Number of elements in the entire stack. The sum of initialized element counts in the chunks.
- */
- int64_t size_;
-
- /** The buffer used to implement small object optimization. */
- BLI_NO_UNIQUE_ADDRESS TypedBuffer<T, InlineBufferCapacity> inline_buffer_;
-
- /**
- * A chunk referencing the inline buffer. This is always the bottom-most chunk.
- * So inline_chunk_.below == nullptr.
- */
- Chunk inline_chunk_;
-
- /** Used for allocations when the inline buffer is not large enough. */
- BLI_NO_UNIQUE_ADDRESS Allocator allocator_;
+ ChunkList<T, InlineBufferCapacity, Allocator> list_;
public:
/**
* Initialize an empty stack. No heap allocation is done.
*/
- Stack(Allocator allocator = {}) noexcept : allocator_(allocator)
+ Stack(Allocator allocator = {}) noexcept : list_(allocator)
{
- inline_chunk_.below = nullptr;
- inline_chunk_.above = nullptr;
- inline_chunk_.begin = inline_buffer_;
- inline_chunk_.capacity_end = inline_buffer_ + InlineBufferCapacity;
-
- top_ = inline_buffer_;
- top_chunk_ = &inline_chunk_;
- size_ = 0;
}
Stack(NoExceptConstructor, Allocator allocator = {}) noexcept : Stack(allocator)
@@ -150,63 +93,6 @@ class Stack {
{
}
- Stack(const Stack &other) : Stack(NoExceptConstructor(), other.allocator_)
- {
- for (const Chunk *chunk = &other.inline_chunk_; chunk; chunk = chunk->above) {
- const T *begin = chunk->begin;
- const T *end = (chunk == other.top_chunk_) ? other.top_ : chunk->capacity_end;
- this->push_multiple(Span<T>(begin, end - begin));
- }
- }
-
- Stack(Stack &&other) noexcept(std::is_nothrow_move_constructible_v<T>)
- : Stack(NoExceptConstructor(), other.allocator_)
- {
- uninitialized_relocate_n<T>(
- other.inline_buffer_, std::min(other.size_, InlineBufferCapacity), inline_buffer_);
-
- inline_chunk_.above = other.inline_chunk_.above;
- size_ = other.size_;
-
- if (inline_chunk_.above != nullptr) {
- inline_chunk_.above->below = &inline_chunk_;
- }
-
- if (size_ <= InlineBufferCapacity) {
- top_chunk_ = &inline_chunk_;
- top_ = inline_buffer_ + size_;
- }
- else {
- top_chunk_ = other.top_chunk_;
- top_ = other.top_;
- }
-
- other.size_ = 0;
- other.inline_chunk_.above = nullptr;
- other.top_chunk_ = &other.inline_chunk_;
- other.top_ = other.top_chunk_->begin;
- }
-
- ~Stack()
- {
- this->destruct_all_elements();
- Chunk *above_chunk;
- for (Chunk *chunk = inline_chunk_.above; chunk; chunk = above_chunk) {
- above_chunk = chunk->above;
- allocator_.deallocate(chunk);
- }
- }
-
- Stack &operator=(const Stack &other)
- {
- return copy_assign_container(*this, other);
- }
-
- Stack &operator=(Stack &&other)
- {
- return move_assign_container(*this, std::move(other));
- }
-
/**
* Add a new element to the top of the stack.
*/
@@ -221,18 +107,7 @@ class Stack {
/* This is similar to `std::stack::emplace`. */
template<typename... ForwardT> void push_as(ForwardT &&...value)
{
- if (top_ == top_chunk_->capacity_end) {
- this->activate_next_chunk(1);
- }
- try {
- new (top_) T(std::forward<ForwardT>(value)...);
- top_++;
- size_++;
- }
- catch (...) {
- this->move_top_pointer_back_to_below_chunk();
- throw;
- }
+ list_.append_as(std::forward<ForwardT>(value)...);
}
/**
@@ -241,19 +116,7 @@ class Stack {
*/
T pop()
{
- BLI_assert(size_ > 0);
- T value = std::move(*(top_ - 1));
- top_--;
- top_->~T();
- size_--;
-
- if (top_ == top_chunk_->begin) {
- if (top_chunk_->below != nullptr) {
- top_chunk_ = top_chunk_->below;
- top_ = top_chunk_->capacity_end;
- }
- }
- return value;
+ return list_.pop_last();
}
/**
@@ -262,15 +125,11 @@ class Stack {
*/
T &peek()
{
- BLI_assert(size_ > 0);
- BLI_assert(top_ > top_chunk_->begin);
- return *(top_ - 1);
+ return list_.last();
}
const T &peek() const
{
- BLI_assert(size_ > 0);
- BLI_assert(top_ > top_chunk_->begin);
- return *(top_ - 1);
+ return list_.last();
}
/**
@@ -280,26 +139,7 @@ class Stack {
*/
void push_multiple(Span<T> values)
{
- Span<T> remaining_values = values;
- while (!remaining_values.is_empty()) {
- if (top_ == top_chunk_->capacity_end) {
- this->activate_next_chunk(remaining_values.size());
- }
-
- const int64_t remaining_capacity = top_chunk_->capacity_end - top_;
- const int64_t amount = std::min(remaining_values.size(), remaining_capacity);
- try {
- uninitialized_copy_n(remaining_values.data(), amount, top_);
- }
- catch (...) {
- this->move_top_pointer_back_to_below_chunk();
- throw;
- }
- top_ += amount;
- size_ += amount;
-
- remaining_values = remaining_values.drop_front(amount);
- }
+ list_.extend(values);
}
/**
@@ -307,7 +147,7 @@ class Stack {
*/
bool is_empty() const
{
- return size_ == 0;
+ return list_.is_empty();
}
/**
@@ -315,7 +155,7 @@ class Stack {
*/
int64_t size() const
{
- return size_;
+ return list_.size();
}
/**
@@ -324,75 +164,7 @@ class Stack {
*/
void clear()
{
- this->destruct_all_elements();
- top_chunk_ = &inline_chunk_;
- top_ = top_chunk_->begin;
- }
-
- /* This should only be called by unit tests. */
- bool is_invariant_maintained() const
- {
- if (size_ == 0) {
- return top_ == inline_chunk_.begin;
- }
- return top_ > top_chunk_->begin;
- }
-
- private:
- /**
- * Changes top_chunk_ to point to a new chunk that is above the current one. The new chunk might
- * be smaller than the given size_hint. This happens when a chunk that has been allocated before
- * is reused. The size of the new chunk will be at least one.
- *
- * This invokes undefined behavior when the currently active chunk is not full.
- */
- void activate_next_chunk(const int64_t size_hint)
- {
- BLI_assert(top_ == top_chunk_->capacity_end);
- if (top_chunk_->above == nullptr) {
- const int64_t new_capacity = std::max(size_hint, top_chunk_->capacity() * 2 + 10);
-
- /* Do a single memory allocation for the Chunk and the array it references. */
- void *buffer = allocator_.allocate(
- sizeof(Chunk) + sizeof(T) * new_capacity + alignof(T), alignof(Chunk), AT);
- void *chunk_buffer = buffer;
- void *data_buffer = reinterpret_cast<void *>(
- (reinterpret_cast<uintptr_t>(buffer) + sizeof(Chunk) + alignof(T) - 1) &
- ~(alignof(T) - 1));
-
- Chunk *new_chunk = new (chunk_buffer) Chunk();
- new_chunk->begin = static_cast<T *>(data_buffer);
- new_chunk->capacity_end = new_chunk->begin + new_capacity;
- new_chunk->above = nullptr;
- new_chunk->below = top_chunk_;
- top_chunk_->above = new_chunk;
- }
- top_chunk_ = top_chunk_->above;
- top_ = top_chunk_->begin;
- }
-
- void move_top_pointer_back_to_below_chunk()
- {
- /* This makes sure that the invariant stays intact after a failed push. */
- if (size_ == 0) {
- top_ = inline_chunk_.begin;
- }
- else if (top_ == top_chunk_->begin) {
- top_chunk_ = top_chunk_->below;
- top_ = top_chunk_->capacity_end;
- }
- }
-
- void destruct_all_elements()
- {
- for (T *value = top_chunk_->begin; value != top_; value++) {
- value->~T();
- }
- for (Chunk *chunk = top_chunk_->below; chunk; chunk = chunk->below) {
- for (T *value = chunk->begin; value != chunk->capacity_end; value++) {
- value->~T();
- }
- }
+ list_.clear();
}
};
diff --git a/source/blender/blenlib/CMakeLists.txt b/source/blender/blenlib/CMakeLists.txt
index 470ffebcad4..a82a86183d0 100644
--- a/source/blender/blenlib/CMakeLists.txt
+++ b/source/blender/blenlib/CMakeLists.txt
@@ -332,6 +332,7 @@ set(SRC
BLI_uvproject.h
BLI_vector.hh
BLI_vector_adaptor.hh
+ BLI_chunk_list.hh
BLI_vector_set.hh
BLI_vector_set_slots.hh
BLI_virtual_array.hh
@@ -443,6 +444,7 @@ if(WITH_GTESTS)
tests/BLI_bit_vector_test.cc
tests/BLI_bitmap_test.cc
tests/BLI_bounds_test.cc
+ tests/BLI_chunk_list_test.cc
tests/BLI_color_test.cc
tests/BLI_cpp_type_test.cc
tests/BLI_delaunay_2d_test.cc
diff --git a/source/blender/blenlib/tests/BLI_chunk_list_test.cc b/source/blender/blenlib/tests/BLI_chunk_list_test.cc
new file mode 100644
index 00000000000..7aadf680ba7
--- /dev/null
+++ b/source/blender/blenlib/tests/BLI_chunk_list_test.cc
@@ -0,0 +1,68 @@
+/* SPDX-License-Identifier: Apache-2.0 */
+
+#include "BLI_chunk_list.hh"
+#include "BLI_timeit.hh"
+
+#include "testing/testing.h"
+
+namespace blender::tests {
+
+TEST(chunk_list, Test)
+{
+ const int64_t amount = 3;
+ for ([[maybe_unused]] const int64_t iter : IndexRange(5)) {
+ {
+ ChunkList<int, 2> list;
+ {
+ SCOPED_TIMER("chunk list: create");
+ for (const int64_t i : IndexRange(amount)) {
+ list.append(i);
+ }
+ }
+ int sum = 0;
+ {
+ SCOPED_TIMER("chunk list: sum");
+ // list.foreach_elem([&](const int v) { sum += v; });
+ for (const int v : list) {
+ sum += v;
+ }
+ }
+ std::cout << "Sum: " << sum << "\n";
+ }
+ {
+ Vector<int, 2> vec;
+ {
+ SCOPED_TIMER("vector: create");
+ for (const int64_t i : IndexRange(amount)) {
+ vec.append(i);
+ }
+ }
+ int sum = 0;
+ {
+ SCOPED_TIMER("vector: sum");
+ for (const int v : vec) {
+ sum += v;
+ }
+ }
+ std::cout << "Sum: " << sum << "\n";
+ }
+ }
+}
+
+TEST(chunk_list, Stack)
+{
+ ChunkList<int64_t> list;
+ const int64_t amount = 1e5;
+ for (const int64_t i : IndexRange(amount)) {
+ list.append(i);
+ }
+ EXPECT_EQ(list.size(), amount);
+ for (const int64_t i : IndexRange(amount)) {
+    const int64_t popped_value = list.pop_last();
+ EXPECT_EQ(popped_value, amount - i - 1);
+ }
+ EXPECT_EQ(list.size(), 0);
+}
+
+} // namespace blender::tests
diff --git a/source/blender/blenlib/tests/BLI_stack_cxx_test.cc b/source/blender/blenlib/tests/BLI_stack_cxx_test.cc
index 0707759666d..8abf8d2314c 100644
--- a/source/blender/blenlib/tests/BLI_stack_cxx_test.cc
+++ b/source/blender/blenlib/tests/BLI_stack_cxx_test.cc
@@ -223,7 +223,6 @@ TEST(stack, PushExceptions)
EXPECT_EQ(stack.size(), 2);
ExceptionThrower *ptr2 = &stack.peek();
EXPECT_EQ(ptr1, ptr2);
- EXPECT_TRUE(stack.is_invariant_maintained());
}
TEST(stack, PopExceptions)
@@ -235,7 +234,6 @@ TEST(stack, PopExceptions)
stack.pop(); /* NOLINT: bugprone-throw-keyword-missing */
EXPECT_ANY_THROW({ stack.pop(); }); /* NOLINT: bugprone-throw-keyword-missing */
EXPECT_EQ(stack.size(), 1);
- EXPECT_TRUE(stack.is_invariant_maintained());
}
TEST(stack, PushMultipleExceptions)
@@ -245,9 +243,7 @@ TEST(stack, PushMultipleExceptions)
std::array<ExceptionThrower, 100> values;
values[6].throw_during_copy = true;
EXPECT_ANY_THROW({ stack.push_multiple(values); });
- EXPECT_TRUE(stack.is_invariant_maintained());
EXPECT_ANY_THROW({ stack.push_multiple(values); });
- EXPECT_TRUE(stack.is_invariant_maintained());
}
} // namespace blender::tests