git.blender.org/blender.git
author     Hans Goudey <h.goudey@me.com>    2022-09-12 18:38:31 +0300
committer  Hans Goudey <h.goudey@me.com>    2022-09-12 19:04:52 +0300
commit     225b5a3491d9593639a80c9a34bcc017862eb2b2 (patch)
tree       9efa12a3587569f6b8a2cac6391ab8ebd040118c
parent     e37f3388b1563591153fc82259cf549f7942dcf0 (diff)
BLI: Add utility functions to generic spans
Generally we don't want to do per-element operations on these spans because of the overhead of the runtime type system, but these operations on the whole span avoid ugly pointer arithmetic in other areas.
-rw-r--r--   source/blender/blenlib/BLI_generic_span.hh | 57
1 file changed, 57 insertions, 0 deletions
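For context, a short usage sketch of what the new slicing utilities look like at a call site; this is not part of the commit, the helper copy_tail_to_head and its arguments are hypothetical, and it assumes Blender's blenlib headers plus the existing GMutableSpan::copy_from:

/* Sketch only: copy the last `n` elements of `src` over the first `n`
 * elements of `dst`. Both spans are assumed to hold the same CPPType and
 * to each contain at least `n` initialized elements. */
#include "BLI_generic_span.hh"

using blender::GMutableSpan;
using blender::GSpan;

static void copy_tail_to_head(const GSpan src, GMutableSpan dst, const int64_t n)
{
  /* Before this commit, slicing a generic span needed explicit byte offsets,
   * e.g. POINTER_OFFSET(src.data(), src.type().size() * (src.size() - n)).
   * take_back()/take_front() keep that arithmetic inside the span type. */
  dst.take_front(n).copy_from(src.take_back(n));
}

The same pattern works for the drop_front/drop_back pair; the point is simply that the element-size arithmetic stays in one place instead of being repeated at every call site.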
diff --git a/source/blender/blenlib/BLI_generic_span.hh b/source/blender/blenlib/BLI_generic_span.hh
index 143ab235d2e..e7a08988c46 100644
--- a/source/blender/blenlib/BLI_generic_span.hh
+++ b/source/blender/blenlib/BLI_generic_span.hh
@@ -100,6 +100,34 @@ class GSpan {
{
return this->slice(range.start(), range.size());
}
+
+ GSpan drop_front(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::max<int64_t>(0, size_ - n);
+ return GSpan(*type_, POINTER_OFFSET(data_, type_->size() * n), new_size);
+ }
+
+ GSpan drop_back(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::max<int64_t>(0, size_ - n);
+ return GSpan(*type_, data_, new_size);
+ }
+
+ GSpan take_front(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::min<int64_t>(size_, n);
+ return GSpan(*type_, data_, new_size);
+ }
+
+ GSpan take_back(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::min<int64_t>(size_, n);
+ return GSpan(*type_, POINTER_OFFSET(data_, type_->size() * (size_ - new_size)), new_size);
+ }
};

/**
@@ -199,6 +227,35 @@ class GMutableSpan {
return this->slice(range.start(), range.size());
}

+ GMutableSpan drop_front(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::max<int64_t>(0, size_ - n);
+ return GMutableSpan(*type_, POINTER_OFFSET(data_, type_->size() * n), new_size);
+ }
+
+ GMutableSpan drop_back(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::max<int64_t>(0, size_ - n);
+ return GMutableSpan(*type_, data_, new_size);
+ }
+
+ GMutableSpan take_front(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::min<int64_t>(size_, n);
+ return GMutableSpan(*type_, data_, new_size);
+ }
+
+ GMutableSpan take_back(const int64_t n) const
+ {
+ BLI_assert(n >= 0);
+ const int64_t new_size = std::min<int64_t>(size_, n);
+ return GMutableSpan(
+ *type_, POINTER_OFFSET(data_, type_->size() * (size_ - new_size)), new_size);
+ }
+
/**
* Copy all values from another span into this span. This invokes undefined behavior when the
* destination contains uninitialized data and T is not trivially copy constructible.