git.blender.org/blender.git

 source/blender/blenlib/BLI_ghash.h        |  34
 source/blender/blenlib/intern/BLI_ghash.c | 622
 tests/gtests/blenlib/BLI_ghash_test.cc    | 292
 3 files changed, 948 insertions, 0 deletions
diff --git a/source/blender/blenlib/BLI_ghash.h b/source/blender/blenlib/BLI_ghash.h
index a9f8d9fc268..43afaebac4c 100644
--- a/source/blender/blenlib/BLI_ghash.h
+++ b/source/blender/blenlib/BLI_ghash.h
@@ -75,6 +75,7 @@ GHash *BLI_ghash_copy(GHash *gh, GHashKeyCopyFP keycopyfp,
void BLI_ghash_free(GHash *gh, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp);
void BLI_ghash_reserve(GHash *gh, const unsigned int nentries_reserve);
void BLI_ghash_insert(GHash *gh, void *key, void *val);
+bool BLI_ghash_add(GHash *gh, void *key, void *val);
bool BLI_ghash_reinsert(GHash *gh, void *key, void *val, GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp);
void *BLI_ghash_lookup(GHash *gh, const void *key) ATTR_WARN_UNUSED_RESULT;
void *BLI_ghash_lookup_default(GHash *gh, const void *key, void *val_default) ATTR_WARN_UNUSED_RESULT;
@@ -91,6 +92,29 @@ unsigned int BLI_ghash_size(GHash *gh) ATTR_WARN_UNUSED_RESULT;
void BLI_ghash_flag_set(GHash *gh, unsigned int flag);
void BLI_ghash_flag_clear(GHash *gh, unsigned int flag);
+bool BLI_ghash_isdisjoint(GHash *gh1, GHash *gh2);
+bool BLI_ghash_isequal(GHash *gh1, GHash *gh2);
+bool BLI_ghash_issubset(GHash *gh1, GHash *gh2);
+bool BLI_ghash_issuperset(GHash *gh1, GHash *gh2);
+
+GHash *BLI_ghash_union(GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp, GHash *gh1, GHash *gh2, ...);
+GHash *BLI_ghash_union_reversed(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...);
+GHash *BLI_ghash_intersection(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...);
+GHash *BLI_ghash_difference(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...);
+GHash *BLI_ghash_symmetric_difference(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...);
+
/* *** */
GHashIterator *BLI_ghashIterator_new(GHash *gh) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT;
@@ -234,6 +258,16 @@ void BLI_gset_clear_ex(GSet *gs, GSetKeyFreeFP keyfreefp,
const unsigned int nentries_reserve);
void BLI_gset_clear(GSet *gs, GSetKeyFreeFP keyfreefp);
+bool BLI_gset_isdisjoint(GSet *gs1, GSet *gs2);
+bool BLI_gset_isequal(GSet *gs1, GSet *gs2);
+bool BLI_gset_issubset(GSet *gs1, GSet *gs2);
+bool BLI_gset_issuperset(GSet *gs1, GSet *gs2);
+
+GSet *BLI_gset_union(GSetKeyCopyFP keycopyfp, GSet *gs1, GSet *gs2, ...);
+GSet *BLI_gset_intersection(GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet *gs1, GSet *gs2, ...);
+GSet *BLI_gset_difference(GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet *gs1, GSet *gs2, ...);
+GSet *BLI_gset_symmetric_difference(GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet *gs1, GSet *gs2, ...);
+
GSet *BLI_gset_ptr_new_ex(const char *info,
const unsigned int nentries_reserve) ATTR_MALLOC ATTR_WARN_UNUSED_RESULT;
GSet *BLI_gset_ptr_new(const char *info);
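
For orientation, here is a minimal usage sketch of the new variadic GHash API declared above (not part of the patch; it assumes the trailing-NULL argument convention used by the implementation below, and reuses the integer hash helpers from BLI_ghashutil):

    #include "BLI_ghash.h"
    #include "BLI_utildefines.h"

    static void example_ghash_union(void)
    {
        GHash *gh_a = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
        GHash *gh_b = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
        GHash *gh_u;

        BLI_ghash_insert(gh_a, SET_UINT_IN_POINTER(1), SET_UINT_IN_POINTER(10));
        BLI_ghash_insert(gh_b, SET_UINT_IN_POINTER(2), SET_UINT_IN_POINTER(20));

        /* Passing NULL as the first GHash requests a new GHash; the trailing NULL terminates the argument list. */
        gh_u = BLI_ghash_union(NULL, NULL, NULL, gh_a, gh_b, NULL);

        /* No copy callbacks were given, so gh_u borrows keys/values: free it without key/value free callbacks. */
        BLI_ghash_free(gh_u, NULL, NULL);
        BLI_ghash_free(gh_b, NULL, NULL);
        BLI_ghash_free(gh_a, NULL, NULL);
    }
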
diff --git a/source/blender/blenlib/intern/BLI_ghash.c b/source/blender/blenlib/intern/BLI_ghash.c
index 9287d62a683..f6f203cc1ef 100644
--- a/source/blender/blenlib/intern/BLI_ghash.c
+++ b/source/blender/blenlib/intern/BLI_ghash.c
@@ -233,6 +233,9 @@ static void ghash_buckets_resize(GHash *gh, const unsigned int nbuckets)
}
}
+//#include "PIL_time.h"
+//#include "PIL_time_utildefines.h"
+
/**
* Check if the number of items in the GHash is large enough to require more buckets,
* or small enough to require less buckets, and resize \a gh accordingly.
@@ -614,6 +617,321 @@ static GHash *ghash_copy(GHash *gh, GHashKeyCopyFP keycopyfp, GHashValCopyFP val
return gh_new;
}
+/**
+ * Merge \a gh2 into \a gh1 (keeping entries already in \a gh1 unchanged), then do the same with each subsequent given GHash.
+ * If \a gh1 is NULL, a new GHash (copy of \a gh2) is created first, so no existing GHash is modified in place.
+ * If \a reverse is true, entries from later GHash arguments override those from earlier ones.
+ */
+static GHash *ghash_union(
+ const bool reverse,
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, va_list arg)
+{
+ GHash *ghn = gh2;
+
+ BLI_assert(ghn);
+
+ if (!gh1) {
+ gh1 = ghash_copy(ghn, keycopyfp, valcopyfp);
+ ghn = va_arg(arg, GHash *);
+ }
+
+ for ( ; ghn; ghn = va_arg(arg, GHash *)) {
+ unsigned int i;
+
+ BLI_assert(gh1->cmpfp == ghn->cmpfp);
+ BLI_assert(gh1->hashfp == ghn->hashfp);
+ BLI_assert(gh1->is_gset == ghn->is_gset);
+
+ for (i = 0; i < ghn->nbuckets; i++) {
+ Entry *e;
+
+ for (e = ghn->buckets[i]; e; e = e->next) {
+ Entry *e_gh1;
+ const unsigned int hash = ghash_entryhash(gh1, e);
+ const unsigned int gh1_bucket_index = ghash_bucket_index(gh1, hash);
+
+ if ((e_gh1 = ghash_lookup_entry_ex(gh1, e->key, gh1_bucket_index)) == NULL) {
+ Entry *e_new = BLI_mempool_alloc(gh1->entrypool);
+
+ ghash_entry_copy(gh1, e_new, ghn, e, keycopyfp, valcopyfp);
+
+ /* As with copy, this does not preserve order (but this would be even less meaningful here). */
+ e_new->next = gh1->buckets[gh1_bucket_index];
+ gh1->buckets[gh1_bucket_index] = e_new;
+ ghash_buckets_expand(gh1, ++gh1->nentries, false);
+ }
+ else if (reverse) {
+ if (keyfreefp) keyfreefp(e_gh1->key);
+ if (valfreefp) valfreefp(((GHashEntry *)e_gh1)->val);
+
+ ghash_entry_copy(gh1, e_gh1, ghn, e, keycopyfp, valcopyfp);
+ }
+ }
+ }
+ }
+
+ return gh1;
+}
+
+/**
+ * Remove all entries in \a gh1 whose keys are not present in \a gh2 and in all subsequent given GHash.
+ * If \a gh1 is NULL, a new GHash (copy of \a gh2) is created first, so no existing GHash is modified in place.
+ */
+static GHash *ghash_intersection(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, va_list arg)
+{
+ GHash *ghn = gh2;
+
+ BLI_assert(ghn);
+
+ if (!gh1) {
+ gh1 = ghash_copy(ghn, keycopyfp, valcopyfp);
+ ghn = va_arg(arg, GHash *);
+ }
+
+ BLI_assert(!valfreefp || !gh1->is_gset);
+
+ for ( ; ghn; ghn = va_arg(arg, GHash *)) {
+ unsigned int new_gh1_nentries = gh1->nentries;
+ unsigned int i;
+
+ BLI_assert(gh1->cmpfp == ghn->cmpfp);
+ BLI_assert(gh1->hashfp == ghn->hashfp);
+
+ for (i = 0; i < gh1->nbuckets; i++) {
+ Entry *e, *e_prev = NULL, *e_next;
+
+ for (e = gh1->buckets[i]; e; e = e_next) {
+ const unsigned int hash = ghash_entryhash(gh1, e);
+ const unsigned int ghn_bucket_index = ghash_bucket_index(ghn, hash);
+
+ e_next = e->next;
+
+ if (ghash_lookup_entry_ex(ghn, e->key, ghn_bucket_index) == NULL) {
+ if (keyfreefp) keyfreefp(e->key);
+ if (valfreefp) valfreefp(((GHashEntry *)e)->val);
+
+ if (e_prev) e_prev->next = e_next;
+ else gh1->buckets[i] = e_next;
+
+ /* We cannot resize gh1 while we are looping on it!!! */
+ new_gh1_nentries--;
+ BLI_mempool_free(gh1->entrypool, e);
+ }
+ else {
+ e_prev = e;
+ }
+ }
+ }
+
+ gh1->nentries = new_gh1_nentries;
+ /* We force shrinking here (if needed). */
+ ghash_buckets_contract(gh1, gh1->nentries, false, true);
+ }
+
+ return gh1;
+}
+
+/**
+ * Remove all entries in \a gh1 whose keys are present in \a gh2 or in any subsequent given GHash.
+ * If \a gh1 is NULL, a new GHash (copy of \a gh2) is created first, so no existing GHash is modified in place.
+ */
+static GHash *ghash_difference(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, va_list arg)
+{
+ GHash *ghn = gh2;
+
+ BLI_assert(ghn);
+
+ if (!gh1) {
+ gh1 = ghash_copy(ghn, keycopyfp, valcopyfp);
+ ghn = va_arg(arg, GHash *);
+ }
+
+ BLI_assert(!valfreefp || !gh1->is_gset);
+
+ for ( ; ghn; ghn = va_arg(arg, GHash *)) {
+ unsigned int new_gh1_nentries = gh1->nentries;
+ unsigned int i;
+
+ BLI_assert(gh1->cmpfp == ghn->cmpfp);
+ BLI_assert(gh1->hashfp == ghn->hashfp);
+
+ for (i = 0; i < gh1->nbuckets; i++) {
+ Entry *e, *e_prev = NULL, *e_next;
+
+ for (e = gh1->buckets[i]; e; e = e_next) {
+ const unsigned int hash = ghash_entryhash(gh1, e);
+ const unsigned int ghn_bucket_index = ghash_bucket_index(ghn, hash);
+
+ e_next = e->next;
+
+ if (ghash_lookup_entry_ex(ghn, e->key, ghn_bucket_index) != NULL) {
+ if (keyfreefp) keyfreefp(e->key);
+ if (valfreefp) valfreefp(((GHashEntry *)e)->val);
+
+ if (e_prev) e_prev->next = e_next;
+ else gh1->buckets[i] = e_next;
+
+ /* We cannot resize gh1 while we are looping on it!!! */
+ new_gh1_nentries--;
+ BLI_mempool_free(gh1->entrypool, e);
+ }
+ else {
+ e_prev = e;
+ }
+ }
+ }
+
+ gh1->nentries = new_gh1_nentries;
+ /* We force shrinking here (if needed). */
+ ghash_buckets_contract(gh1, gh1->nentries, false, true);
+ }
+
+ return gh1;
+}
+
+/**
+ * Keep in \a gh1 only the entries whose keys are present in exactly one of all the given GHash.
+ * If \a gh1 is NULL, a new GHash (copy of \a gh2) is created first, so no existing GHash is modified in place.
+ */
+static GHash *ghash_symmetric_difference(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, va_list arg)
+{
+ GHash *ghn = gh2;
+ unsigned int i;
+
+ /* Temp storage; we never copy keys/values here, just borrow them from the real ghash. */
+ /* Warning! rem_keys is used as a gset (i.e. no value memory is reserved for it). */
+ GHash *keys, *rem_keys;
+
+ BLI_assert(ghn);
+
+ if (!gh1) {
+ gh1 = ghash_copy(ghn, keycopyfp, valcopyfp);
+ ghn = va_arg(arg, GHash *);
+ }
+
+ BLI_assert(!valfreefp || !gh1->is_gset);
+
+ keys = ghash_copy(gh1, NULL, NULL);
+ rem_keys = ghash_new(gh1->hashfp, gh1->cmpfp, __func__, 64, true);
+
+ /* First pass: every key found at least once goes into keys, every key found at least twice goes into rem_keys. */
+ for ( ; ghn; ghn = va_arg(arg, GHash *)) {
+ BLI_assert(gh1->cmpfp == ghn->cmpfp);
+ BLI_assert(gh1->hashfp == ghn->hashfp);
+ BLI_assert(gh1->is_gset == ghn->is_gset);
+
+ for (i = 0; i < ghn->nbuckets; i++) {
+ Entry *e;
+
+ for (e = ghn->buckets[i]; e; e = e->next) {
+ const unsigned int hash = ghash_entryhash(ghn, e);
+ const unsigned int keys_bucket_index = ghash_bucket_index(keys, hash);
+
+ if (ghash_lookup_entry_ex(keys, e->key, keys_bucket_index) != NULL) {
+ const unsigned int rem_keys_bucket_index = ghash_bucket_index(rem_keys, hash);
+ Entry *e_new = BLI_mempool_alloc(rem_keys->entrypool);
+
+ ghash_entry_copy(rem_keys, e_new, ghn, e, NULL, NULL);
+
+ /* As with copy, this does not preserve order (but this would be even less meaningful here). */
+ e_new->next = rem_keys->buckets[rem_keys_bucket_index];
+ rem_keys->buckets[rem_keys_bucket_index] = e_new;
+ ghash_buckets_expand(rem_keys, ++rem_keys->nentries, false);
+ }
+ else {
+ Entry *e_new = BLI_mempool_alloc(keys->entrypool);
+
+ ghash_entry_copy(keys, e_new, ghn, e, NULL, NULL);
+
+ /* As with copy, this does not preserve order (but this would be even less meaningful here). */
+ e_new->next = keys->buckets[keys_bucket_index];
+ keys->buckets[keys_bucket_index] = e_new;
+ ghash_buckets_expand(keys, ++keys->nentries, false);
+ }
+ }
+ }
+ }
+
+ /* Now, keys we actually want are (keys - rem_keys) */
+ for (i = 0; i < rem_keys->nbuckets; i++) {
+ Entry *e;
+
+ for (e = rem_keys->buckets[i]; e; e = e->next) {
+ Entry *e_prev, *e_curr;
+ const unsigned int hash = ghash_entryhash(rem_keys, e);
+ const unsigned int keys_bucket_index = ghash_bucket_index(keys, hash);
+ const unsigned int gh1_bucket_index = ghash_bucket_index(gh1, hash);
+
+ e_curr = ghash_lookup_entry_prev_ex(keys, e->key, &e_prev, keys_bucket_index);
+ BLI_assert(e_curr != NULL); /* All keys in rem_keys must exist in keys! */
+
+ if (e_prev) e_prev->next = e_curr->next;
+ else keys->buckets[keys_bucket_index] = e_curr->next;
+
+ /* We do not care about shrinking keys' buckets here! */
+ keys->nentries--;
+ BLI_mempool_free(keys->entrypool, e_curr);
+
+ /* Also remove the key from gh1 if possible, while we are at it... */
+ e_curr = ghash_lookup_entry_prev_ex(gh1, e->key, &e_prev, gh1_bucket_index);
+ if (e_curr) {
+ if (e_prev) e_prev->next = e_curr->next;
+ else gh1->buckets[gh1_bucket_index] = e_curr->next;
+
+ /* Note: We can free the key/value here, because we won't use them again (they have been removed
+ * from keys already, and we won't use the matching entry from rem_keys again either). */
+ if (keyfreefp) keyfreefp(e_curr->key);
+ if (valfreefp) valfreefp(((GHashEntry *)e_curr)->val);
+
+ /* We do not care about shrinking gh1's buckets here for now! */
+ gh1->nentries--;
+ BLI_mempool_free(gh1->entrypool, e_curr);
+ }
+ }
+ }
+
+ BLI_ghash_free(rem_keys, NULL, NULL);
+
+ /* Final step: add (copy) all entries from keys which are not already in gh1. */
+ for (i = 0; i < keys->nbuckets; i++) {
+ Entry *e;
+
+ for (e = keys->buckets[i]; e; e = e->next) {
+ const unsigned int hash = ghash_entryhash(keys, e);
+ const unsigned int gh1_bucket_index = ghash_bucket_index(gh1, hash);
+
+ if (ghash_lookup_entry_ex(gh1, e->key, gh1_bucket_index) == NULL) {
+ Entry *e_new = BLI_mempool_alloc(gh1->entrypool);
+
+ ghash_entry_copy(gh1, e_new, keys, e, keycopyfp, valcopyfp);
+
+ /* As with copy, this does not preserve order (but this would be even less meaningful here). */
+ e_new->next = gh1->buckets[gh1_bucket_index];
+ gh1->buckets[gh1_bucket_index] = e_new;
+ ghash_buckets_expand(gh1, ++gh1->nentries, false);
+ }
+ }
+ }
+
+ BLI_ghash_free(keys, NULL, NULL);
+
+ /* We force shrinking here (if needed). */
+ ghash_buckets_contract(gh1, gh1->nentries, false, true);
+
+ return gh1;
+}
+
/** \} */
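
All four static helpers above consume their extra GHash arguments the same way: the public wrappers hand them a va_list, and the loop "for ( ; ghn; ghn = va_arg(arg, GHash *))" stops at the first NULL pointer. A standalone sketch of that pattern, with hypothetical names, purely for illustration:

    #include <stdarg.h>
    #include <stdio.h>

    /* Count pointer arguments up to (not including) the trailing NULL sentinel. */
    static int count_until_null(void *first, ...)
    {
        va_list arg;
        void *p = first;
        int count = 0;

        va_start(arg, first);
        for (; p; p = va_arg(arg, void *)) {
            count++;
        }
        va_end(arg);

        return count;
    }

    int main(void)
    {
        int a, b, c;
        printf("%d\n", count_until_null(&a, &b, &c, NULL));  /* prints 3 */
        return 0;
    }
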
@@ -902,6 +1220,225 @@ void BLI_ghash_flag_clear(GHash *gh, unsigned int flag)
gh->flag &= ~flag;
}
+/**
+ * Check whether no key from \a gh1 exists in \a gh2.
+ */
+bool BLI_ghash_isdisjoint(GHash *gh1, GHash *gh2)
+{
+ /* Note: For now, take a basic, brute force approach.
+ * If we switch from modulo to masking, we may have ways to optimize this, though. */
+ unsigned int i;
+
+ BLI_assert(gh1->cmpfp == gh2->cmpfp);
+ BLI_assert(gh1->hashfp == gh2->hashfp);
+
+ if (gh1->nentries > gh2->nentries)
+ {
+ SWAP(GHash *, gh1, gh2);
+ }
+
+ for (i = 0; i < gh1->nbuckets; i++) {
+ Entry *e;
+
+ for (e = gh1->buckets[i]; e; e = e->next) {
+ const unsigned int hash = ghash_entryhash(gh1, e);
+ const unsigned int gh2_bucket_index = ghash_bucket_index(gh2, hash);
+ if (ghash_lookup_entry_ex(gh2, e->key, gh2_bucket_index)) {
+ return false;
+ }
+ }
+ }
+
+ return true;
+}
+
+/**
+ * Check whether \a gh1 and \a gh2 contain exactly the same keys.
+ */
+bool BLI_ghash_isequal(GHash *gh1, GHash *gh2)
+{
+ unsigned int i;
+
+ BLI_assert(gh1->cmpfp == gh2->cmpfp);
+ BLI_assert(gh1->hashfp == gh2->hashfp);
+
+ if (gh1->nentries != gh2->nentries) {
+ return false;
+ }
+
+ for (i = 0; i < gh1->nbuckets; i++) {
+ Entry *e;
+
+ for (e = gh1->buckets[i]; e; e = e->next) {
+ const unsigned int hash = ghash_entryhash(gh1, e);
+ unsigned int gh2_bucket_index = ghash_bucket_index(gh2, hash);
+ if (!ghash_lookup_entry_ex(gh2, e->key, gh2_bucket_index)) {
+ return false;
+ }
+ }
+ }
+
+ return true;
+}
+
+/**
+ * Check whether \a gh2 keys are a subset of \a gh1 keys.
+ * gh1 >= gh2
+ *
+ * Note: Strict subset is (gh1 >= gh2) && (gh1->nentries != gh2->nentries).
+ */
+bool BLI_ghash_issubset(GHash *gh1, GHash *gh2)
+{
+ unsigned int i;
+
+ BLI_assert(gh1->cmpfp == gh2->cmpfp);
+ BLI_assert(gh1->hashfp == gh2->hashfp);
+
+ if (gh1->nentries < gh2->nentries) {
+ return false;
+ }
+
+ for (i = 0; i < gh2->nbuckets; i++) {
+ Entry *e;
+
+ for (e = gh2->buckets[i]; e; e = e->next) {
+ const unsigned int hash = ghash_entryhash(gh2, e);
+ const unsigned int gh1_bucket_index = ghash_bucket_index(gh1, hash);
+ if (!ghash_lookup_entry_ex(gh1, e->key, gh1_bucket_index)) {
+ return false;
+ }
+ }
+ }
+
+ return true;
+}
+
+/**
+ * Check whether \a gh2 keys are a superset of \a gh1 keys.
+ * gh1 <= gh2
+ *
+ * Note: Strict superset is (gh1 <= gh2) && (gh1->nentries != gh2->nentries).
+ */
+bool BLI_ghash_issuperset(GHash *gh1, GHash *gh2)
+{
+ return BLI_ghash_issubset(gh2, gh1);
+}
+
+/**
+ * Union, from left to right (entries already present in an earlier GHash take precedence).
+ * Similar to Python's \a PyDict_Merge(a, b, false).
+ * If \a gh1 is NULL, a new GHash is returned, otherwise \a gh1 is modified in place and returned.
+ *
+ * Notes:
+ * * All given GHash shall share the same comparison and hashing functions!
+ * * All given GHash **must** be valid GHash (no GSet allowed here).
+ */
+GHash *BLI_ghash_union(GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp, GHash *gh1, GHash *gh2, ...)
+{
+ GHash *gh_ret;
+ va_list arg;
+
+ va_start(arg, gh2);
+ gh_ret = ghash_union(false, keycopyfp, valcopyfp, NULL, NULL, gh1, gh2, arg);
+ va_end(arg);
+
+ return gh_ret;
+}
+
+/**
+ * Union, from right to left (less efficient, since it may have to free existing key/val pairs and copy in new ones; entries from later GHash arguments take precedence).
+ * Similar to Python's \a PyDict_Merge(a, b, true).
+ * If \a gh1 is NULL, a new GHash is returned, otherwise \a gh1 is modified in place and returned.
+ *
+ * Notes:
+ * * All given GHash shall share the same comparison and hashing functions!
+ * * All given GHash **must** be valid GHash (no GSet allowed here).
+ */
+GHash *BLI_ghash_union_reversed(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...)
+{
+ GHash *gh_ret;
+ va_list arg;
+
+ va_start(arg, gh2);
+ gh_ret = ghash_union(true, keycopyfp, valcopyfp, keyfreefp, valfreefp, gh1, gh2, arg);
+ va_end(arg);
+
+ return gh_ret;
+}
+
+/**
+ * Intersection (i.e. entries whose keys exist in all of gh1, gh2, ...).
+ * If \a gh1 is NULL, a new GHash is returned, otherwise \a gh1 is modified in place and returned.
+ *
+ * Notes:
+ * * All given GHash shall share the same comparison and hashing functions!
+ * * First given GHash **must** be a valid GHash, subsequent ones may be GSet.
+ */
+GHash *BLI_ghash_intersection(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...)
+{
+ GHash *gh_ret;
+ va_list arg;
+
+ va_start(arg, gh2);
+ gh_ret = ghash_intersection(keycopyfp, valcopyfp, keyfreefp, valfreefp, gh1, gh2, arg);
+ va_end(arg);
+
+ return gh_ret;
+}
+
+/**
+ * Difference, i.e. remove all entries in \a gh1 whose keys are present in \a gh2 or in any subsequent given GHash.
+ * If \a gh1 is NULL, a new GHash is returned, otherwise \a gh1 is modified in place and returned.
+ *
+ * Notes:
+ * * All given GHash shall share the same comparison and hashing functions!
+ * * First given GHash **must** be a valid GHash, subsequent ones may be GSet.
+ */
+GHash *BLI_ghash_difference(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...)
+{
+ GHash *gh_ret;
+ va_list arg;
+
+ va_start(arg, gh2);
+ gh_ret = ghash_difference(keycopyfp, valcopyfp, keyfreefp, valfreefp, gh1, gh2, arg);
+ va_end(arg);
+
+ return gh_ret;
+}
+
+/**
+ * Symmetric difference,
+ * i.e. keep in \a gh1 only the entries whose keys are present in exactly one of all the given GHash.
+ * If \a gh1 is NULL, a new GHash is returned, otherwise \a gh1 is modified in place and returned.
+ *
+ * Notes:
+ * * All given GHash shall share the same comparison and hashing functions!
+ * * All given GHash **must** be valid GHash (no GSet allowed here).
+ */
+GHash *BLI_ghash_symmetric_difference(
+ GHashKeyCopyFP keycopyfp, GHashValCopyFP valcopyfp,
+ GHashKeyFreeFP keyfreefp, GHashValFreeFP valfreefp,
+ GHash *gh1, GHash *gh2, ...)
+{
+ GHash *gh_ret;
+ va_list arg;
+
+ va_start(arg, gh2);
+ gh_ret = ghash_symmetric_difference(keycopyfp, valcopyfp, keyfreefp, valfreefp, gh1, gh2, arg);
+ va_end(arg);
+
+ return gh_ret;
+}
+
/** \} */
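
The argument order of these predicates is easy to misread: BLI_ghash_issubset(gh1, gh2) asks whether gh2's keys are all contained in gh1, and BLI_ghash_issuperset() is the same query with the operands swapped. A small sketch with hypothetical keys (not from the patch):

    #include "BLI_ghash.h"
    #include "BLI_utildefines.h"

    static void example_ghash_predicates(void)
    {
        GHash *gh_all  = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
        GHash *gh_some = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);

        BLI_ghash_insert(gh_all, SET_UINT_IN_POINTER(1), SET_UINT_IN_POINTER(1));
        BLI_ghash_insert(gh_all, SET_UINT_IN_POINTER(2), SET_UINT_IN_POINTER(2));
        BLI_ghash_insert(gh_some, SET_UINT_IN_POINTER(1), SET_UINT_IN_POINTER(1));

        BLI_assert(BLI_ghash_issubset(gh_all, gh_some));    /* gh_some's keys are a subset of gh_all's */
        BLI_assert(BLI_ghash_issuperset(gh_some, gh_all));  /* same test, operands swapped */
        BLI_assert(!BLI_ghash_isdisjoint(gh_all, gh_some)); /* they share key 1 */
        BLI_assert(!BLI_ghash_isequal(gh_all, gh_some));    /* entry counts differ */

        BLI_ghash_free(gh_some, NULL, NULL);
        BLI_ghash_free(gh_all, NULL, NULL);
    }
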
@@ -1341,6 +1878,91 @@ void BLI_gset_flag_clear(GSet *gs, unsigned int flag)
((GHash *)gs)->flag &= ~flag;
}
+bool BLI_gset_isdisjoint(GSet *gs1, GSet *gs2)
+{
+ return BLI_ghash_isdisjoint((GHash *)gs1, (GHash *)gs2);
+}
+
+bool BLI_gset_isequal(GSet *gs1, GSet *gs2)
+{
+ return BLI_ghash_isequal((GHash *)gs1, (GHash *)gs2);
+}
+
+bool BLI_gset_issubset(GSet *gs1, GSet *gs2)
+{
+ return BLI_ghash_issubset((GHash *)gs1, (GHash *)gs2);
+}
+
+bool BLI_gset_issuperset(GSet *gs1, GSet *gs2)
+{
+ return BLI_ghash_issubset((GHash *)gs2, (GHash *)gs1);
+}
+
+/**
+ * Union (there is no left-to-right/right-to-left variant here, since sets have no values to override).
+ * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
+ */
+GSet *BLI_gset_union(GSetKeyCopyFP keycopyfp, GSet *gs1, GSet *gs2, ...)
+{
+ GSet *gs_ret;
+ va_list arg;
+
+ va_start(arg, gs2);
+ gs_ret = (GSet *)ghash_union(false, keycopyfp, NULL, NULL, NULL, (GHash *)gs1, (GHash *)gs2, arg);
+ va_end(arg);
+
+ return gs_ret;
+}
+
+/**
+ * Intersection (i.e. keys that exist in all of gs1, gs2, ...).
+ * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
+ */
+GSet *BLI_gset_intersection(GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet *gs1, GSet *gs2, ...)
+{
+ GSet *gs_ret;
+ va_list arg;
+
+ va_start(arg, gs2);
+ gs_ret = (GSet *)ghash_intersection(keycopyfp, NULL, keyfreefp, NULL, (GHash *)gs1, (GHash *)gs2, arg);
+ va_end(arg);
+
+ return gs_ret;
+}
+
+/**
+ * Difference, i.e. remove all keys in \a gs1 that are present in \a gs2 or in any subsequent given GSet.
+ * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
+ */
+GSet *BLI_gset_difference(GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet *gs1, GSet *gs2, ...)
+{
+ GSet *gs_ret;
+ va_list arg;
+
+ va_start(arg, gs2);
+ gs_ret = (GSet *)ghash_difference(keycopyfp, NULL, keyfreefp, NULL, (GHash *)gs1, (GHash *)gs2, arg);
+ va_end(arg);
+
+ return gs_ret;
+}
+
+/**
+ * Symmetric difference,
+ * i.e. keep in \a gs1 only the keys that are present in exactly one of all the given GSet.
+ * If \a gs1 is NULL, a new GSet is returned, otherwise \a gs1 is modified in place.
+ */
+GSet *BLI_gset_symmetric_difference(GSetKeyCopyFP keycopyfp, GSetKeyFreeFP keyfreefp, GSet *gs1, GSet *gs2, ...)
+{
+ GSet *gs_ret;
+ va_list arg;
+
+ va_start(arg, gs2);
+ gs_ret = (GSet *)ghash_symmetric_difference(keycopyfp, NULL, keyfreefp, NULL, (GHash *)gs1, (GHash *)gs2, arg);
+ va_end(arg);
+
+ return gs_ret;
+}
+
/** \} */
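
A matching sketch for the GSet wrappers above, which simply cast down to the GHash implementations; as before, NULL as the first GSet requests a newly allocated set and the trailing NULL terminates the argument list (keys are hypothetical):

    #include "BLI_ghash.h"
    #include "BLI_utildefines.h"

    static void example_gset_ops(void)
    {
        GSet *gs_a = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
        GSet *gs_b = BLI_gset_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
        GSet *gs_i;

        BLI_gset_insert(gs_a, SET_UINT_IN_POINTER(1));
        BLI_gset_insert(gs_a, SET_UINT_IN_POINTER(2));
        BLI_gset_insert(gs_b, SET_UINT_IN_POINTER(2));

        /* Keys common to gs_a and gs_b, returned as a new GSet. */
        gs_i = BLI_gset_intersection(NULL, NULL, NULL, gs_a, gs_b, NULL);
        BLI_assert(BLI_gset_size(gs_i) == 1);

        BLI_gset_free(gs_i, NULL);
        BLI_gset_free(gs_b, NULL);
        BLI_gset_free(gs_a, NULL);
    }
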
diff --git a/tests/gtests/blenlib/BLI_ghash_test.cc b/tests/gtests/blenlib/BLI_ghash_test.cc
index 5fe43d14cbe..89987746a98 100644
--- a/tests/gtests/blenlib/BLI_ghash_test.cc
+++ b/tests/gtests/blenlib/BLI_ghash_test.cc
@@ -156,3 +156,295 @@ TEST(ghash, Copy)
BLI_ghash_free(ghash, NULL, NULL);
BLI_ghash_free(ghash_copy, NULL, NULL);
}
+
+/* Check disjoint. */
+TEST(ghash, Disjoint)
+{
+ GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ unsigned int keys[TESTCASE_SIZE], *k;
+ int i;
+
+ init_keys(keys, 40);
+
+ for (i = (TESTCASE_SIZE - 2) / 2, k = keys; i--; k++) {
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(*k));
+ }
+ for (i = (TESTCASE_SIZE - 2) / 2; i--; k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));
+
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(keys[0]), SET_UINT_IN_POINTER(keys[0]));
+
+ EXPECT_FALSE(BLI_ghash_isdisjoint(ghash_1, ghash_2));
+
+ BLI_ghash_remove(ghash_2, SET_UINT_IN_POINTER(keys[0]), NULL, NULL);
+
+ EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));
+
+ BLI_ghash_free(ghash_1, NULL, NULL);
+ BLI_ghash_free(ghash_2, NULL, NULL);
+}
+
+/* Check equality. */
+TEST(ghash, Equal)
+{
+ GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ unsigned int keys[TESTCASE_SIZE], *k;
+ int i;
+
+ init_keys(keys, 50);
+
+ for (i = TESTCASE_SIZE, k = keys; i--; k++) {
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(*k));
+ }
+ for (i = 0, k = keys; i < TESTCASE_SIZE; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));
+
+ BLI_ghash_remove(ghash_2, SET_UINT_IN_POINTER(keys[TESTCASE_SIZE / 2]), NULL, NULL);
+
+ EXPECT_FALSE(BLI_ghash_isequal(ghash_1, ghash_2));
+
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(keys[TESTCASE_SIZE / 2]), SET_UINT_IN_POINTER(keys[TESTCASE_SIZE / 2]));
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));
+
+ BLI_ghash_free(ghash_1, NULL, NULL);
+ BLI_ghash_free(ghash_2, NULL, NULL);
+}
+
+/* Check subset. */
+TEST(ghash, Subset)
+{
+ GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ unsigned int keys[TESTCASE_SIZE], *k;
+ int i;
+
+ init_keys(keys, 60);
+
+ for (i = TESTCASE_SIZE, k = keys; i--; k++) {
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(*k));
+ }
+ for (i = 0, k = keys; i < TESTCASE_SIZE / 2; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_issubset(ghash_1, ghash_2));
+
+ BLI_ghash_remove(ghash_1, SET_UINT_IN_POINTER(keys[0]), NULL, NULL);
+
+ EXPECT_FALSE(BLI_ghash_issubset(ghash_1, ghash_2));
+
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(keys[0]), SET_UINT_IN_POINTER(keys[0]));
+
+ EXPECT_TRUE(BLI_ghash_issubset(ghash_1, ghash_2));
+
+ BLI_ghash_free(ghash_1, NULL, NULL);
+ BLI_ghash_free(ghash_2, NULL, NULL);
+}
+
+/* Check Union (straight and reversed). */
+TEST(ghash, Union)
+{
+ GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_U, *ghash_U_rev;
+ unsigned int keys[TESTCASE_SIZE], *k;
+ int i;
+
+ init_keys(keys, 70);
+
+ for (i = TESTCASE_SIZE / 2, k = keys; i--; k++) {
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(*k));
+ }
+ for (i = 0, k = keys; i < TESTCASE_SIZE / 2; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));
+
+ ghash_U = BLI_ghash_union(NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+ ghash_U_rev = BLI_ghash_union_reversed(NULL, NULL, NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_U, ghash_1));
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_U_rev, ghash_1));
+
+ for (i = 0, k = keys; i < TESTCASE_SIZE / 2; i++, k++) {
+ void *v = BLI_ghash_lookup(ghash_U, SET_UINT_IN_POINTER(*k));
+ EXPECT_EQ(*k, GET_UINT_FROM_POINTER(v));
+
+ v = BLI_ghash_lookup(ghash_U_rev, SET_UINT_IN_POINTER(*k));
+ EXPECT_EQ(i, GET_INT_FROM_POINTER(v));
+ }
+
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_U, NULL, NULL);
+ ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ for (i = TESTCASE_SIZE / 2, k = &keys[i]; i < TESTCASE_SIZE; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));
+
+ ghash_U = BLI_ghash_union(NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_TRUE(BLI_ghash_issubset(ghash_U, ghash_1));
+ EXPECT_TRUE(BLI_ghash_issubset(ghash_U, ghash_2));
+
+ BLI_ghash_free(ghash_1, NULL, NULL);
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_U, NULL, NULL);
+ BLI_ghash_free(ghash_U_rev, NULL, NULL);
+}
+
+/* Check Intersection. */
+TEST(ghash, Intersection)
+{
+ GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_I;
+ unsigned int keys[TESTCASE_SIZE], *k;
+ int i;
+
+ init_keys(keys, 80);
+
+ for (i = TESTCASE_SIZE / 2, k = keys; i--; k++) {
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(*k));
+ }
+ for (i = 0, k = keys; i < TESTCASE_SIZE / 2; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));
+
+ ghash_I = BLI_ghash_intersection(NULL, NULL, NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_I, ghash_1));
+
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_I, NULL, NULL);
+ ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ for (i = TESTCASE_SIZE / 2, k = &keys[i]; i < TESTCASE_SIZE; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));
+
+ ghash_I = BLI_ghash_intersection(NULL, NULL, NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_EQ(0, BLI_ghash_size(ghash_I));
+
+ BLI_ghash_free(ghash_1, NULL, NULL);
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_I, NULL, NULL);
+}
+
+/* Check Difference. */
+TEST(ghash, Difference)
+{
+ GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_D;
+ unsigned int keys[TESTCASE_SIZE], *k;
+ int i;
+
+ init_keys(keys, 90);
+
+ for (i = TESTCASE_SIZE / 2, k = keys; i--; k++) {
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(*k));
+ }
+ for (i = 0, k = keys; i < TESTCASE_SIZE / 2; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));
+
+ ghash_D = BLI_ghash_difference(NULL, NULL, NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_EQ(0, BLI_ghash_size(ghash_D));
+
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_D, NULL, NULL);
+ ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ for (i = TESTCASE_SIZE / 2, k = &keys[i]; i < TESTCASE_SIZE; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));
+
+ ghash_D = BLI_ghash_difference(NULL, NULL, NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_D, ghash_1));
+
+ BLI_ghash_free(ghash_1, NULL, NULL);
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_D, NULL, NULL);
+}
+
+/* Check Symmetric Difference. */
+TEST(ghash, SymmDiff)
+{
+ GHash *ghash_1 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ GHash *ghash_SD;
+ unsigned int keys[TESTCASE_SIZE], *k;
+ int i;
+
+ init_keys(keys, 100);
+
+ for (i = TESTCASE_SIZE / 2, k = keys; i--; k++) {
+ BLI_ghash_insert(ghash_1, SET_UINT_IN_POINTER(*k), SET_UINT_IN_POINTER(*k));
+ }
+ for (i = 0, k = keys; i < TESTCASE_SIZE / 2; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_1, ghash_2));
+
+ ghash_SD = BLI_ghash_symmetric_difference(NULL, NULL, NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_EQ(0, BLI_ghash_size(ghash_SD));
+
+ ghash_SD = BLI_ghash_symmetric_difference(NULL, NULL, NULL, NULL, ghash_SD, ghash_1, NULL);
+
+ EXPECT_TRUE(BLI_ghash_isequal(ghash_SD, ghash_1));
+
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_SD, NULL, NULL);
+ ghash_2 = BLI_ghash_new(BLI_ghashutil_inthash_p, BLI_ghashutil_intcmp, __func__);
+ for (i = TESTCASE_SIZE / 2, k = &keys[i]; i < TESTCASE_SIZE; i++, k++) {
+ /* Because values should have no effect at all here. */
+ BLI_ghash_insert(ghash_2, SET_UINT_IN_POINTER(*k), SET_INT_IN_POINTER(i));
+ }
+
+ EXPECT_TRUE(BLI_ghash_isdisjoint(ghash_1, ghash_2));
+
+ ghash_SD = BLI_ghash_symmetric_difference(NULL, NULL, NULL, NULL, NULL, ghash_1, ghash_2, NULL);
+
+ EXPECT_EQ(TESTCASE_SIZE, BLI_ghash_size(ghash_SD));
+
+ ghash_SD = BLI_ghash_symmetric_difference(NULL, NULL, NULL, NULL, ghash_SD, ghash_1, ghash_2, NULL);
+
+ EXPECT_EQ(0, BLI_ghash_size(ghash_SD));
+
+ BLI_ghash_free(ghash_1, NULL, NULL);
+ BLI_ghash_free(ghash_2, NULL, NULL);
+ BLI_ghash_free(ghash_SD, NULL, NULL);
+}
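
The tests above always combine exactly two containers; the variadic signatures also accept more, as in this hypothetical three-way call (a plain C sketch, not part of the test suite):

    #include "BLI_ghash.h"

    /* Entries of gh_a whose keys appear in neither gh_b nor gh_c.
     * Passing NULL as gh1 leaves gh_a untouched and returns a new GHash. */
    static GHash *difference_of_three(GHash *gh_a, GHash *gh_b, GHash *gh_c)
    {
        return BLI_ghash_difference(NULL, NULL, NULL, NULL, NULL, gh_a, gh_b, gh_c, NULL);
    }
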