diff options
Diffstat (limited to 'source/blender/blenkernel/intern')
49 files changed, 4276 insertions, 478 deletions
diff --git a/source/blender/blenkernel/intern/asset.cc b/source/blender/blenkernel/intern/asset.cc index f74018b20c5..ae9ded3c754 100644 --- a/source/blender/blenkernel/intern/asset.cc +++ b/source/blender/blenkernel/intern/asset.cc @@ -26,8 +26,10 @@ #include "BLI_listbase.h" #include "BLI_string.h" +#include "BLI_string_ref.hh" #include "BLI_string_utils.h" #include "BLI_utildefines.h" +#include "BLI_uuid.h" #include "BKE_asset.h" #include "BKE_icons.h" @@ -37,6 +39,8 @@ #include "MEM_guardedalloc.h" +using namespace blender; + AssetMetaData *BKE_asset_metadata_create(void) { AssetMetaData *asset_data = (AssetMetaData *)MEM_callocN(sizeof(*asset_data), __func__); @@ -115,6 +119,27 @@ void BKE_asset_library_reference_init_default(AssetLibraryReference *library_ref memcpy(library_ref, DNA_struct_default_get(AssetLibraryReference), sizeof(*library_ref)); } +void BKE_asset_metadata_catalog_id_clear(struct AssetMetaData *asset_data) +{ + asset_data->catalog_id = BLI_uuid_nil(); + asset_data->catalog_simple_name[0] = '\0'; +} + +void BKE_asset_metadata_catalog_id_set(struct AssetMetaData *asset_data, + const ::bUUID catalog_id, + const char *catalog_simple_name) +{ + asset_data->catalog_id = catalog_id; + + constexpr size_t max_simple_name_length = sizeof(asset_data->catalog_simple_name); + + /* The substr() call is necessary to make copy() copy the first N characters (instead of refusing + * to copy and producing an empty string). 
*/ + StringRef trimmed_id = + StringRef(catalog_simple_name).trim().substr(0, max_simple_name_length - 1); + trimmed_id.copy(asset_data->catalog_simple_name, max_simple_name_length); +} + /* Queries -------------------------------------------- */ PreviewImage *BKE_asset_metadata_preview_get_from_id(const AssetMetaData *UNUSED(asset_data), diff --git a/source/blender/blenkernel/intern/asset_catalog.cc b/source/blender/blenkernel/intern/asset_catalog.cc new file mode 100644 index 00000000000..2c7cf28d60d --- /dev/null +++ b/source/blender/blenkernel/intern/asset_catalog.cc @@ -0,0 +1,791 @@ +/* + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ + +/** \file + * \ingroup bke + */ + +#include "BKE_asset_catalog.hh" +#include "BKE_asset_library.h" +#include "BKE_preferences.h" + +#include "BLI_fileops.h" +#include "BLI_path_util.h" +#include "BLI_string_ref.hh" + +#include "DNA_userdef_types.h" + +/* For S_ISREG() and S_ISDIR() on Windows. */ +#ifdef WIN32 +# include "BLI_winstuff.h" +#endif + +#include <fstream> +#include <set> + +namespace blender::bke { + +const CatalogFilePath AssetCatalogService::DEFAULT_CATALOG_FILENAME = "blender_assets.cats.txt"; + +/* For now this is the only version of the catalog definition files that is supported. 
+ * Later versioning code may be added to handle older files. */ +const int AssetCatalogDefinitionFile::SUPPORTED_VERSION = 1; +/* String that's matched in the catalog definition file to know that the line is the version + * declaration. It has to start with a space to ensure it won't match any hypothetical future field + * that starts with "VERSION". */ +const std::string AssetCatalogDefinitionFile::VERSION_MARKER = "VERSION "; + +const std::string AssetCatalogDefinitionFile::HEADER = + "# This is an Asset Catalog Definition file for Blender.\n" + "#\n" + "# Empty lines and lines starting with `#` will be ignored.\n" + "# The first non-ignored line should be the version indicator.\n" + "# Other lines are of the format \"UUID:catalog/path/for/assets:simple catalog name\"\n"; + +AssetCatalogService::AssetCatalogService(const CatalogFilePath &asset_library_root) + : asset_library_root_(asset_library_root) +{ +} + +bool AssetCatalogService::is_empty() const +{ + return catalogs_.is_empty(); +} + +AssetCatalog *AssetCatalogService::find_catalog(CatalogID catalog_id) const +{ + const std::unique_ptr<AssetCatalog> *catalog_uptr_ptr = this->catalogs_.lookup_ptr(catalog_id); + if (catalog_uptr_ptr == nullptr) { + return nullptr; + } + return catalog_uptr_ptr->get(); +} + +AssetCatalog *AssetCatalogService::find_catalog_by_path(const AssetCatalogPath &path) const +{ + for (const auto &catalog : catalogs_.values()) { + if (catalog->path == path) { + return catalog.get(); + } + } + + return nullptr; +} + +AssetCatalogFilter AssetCatalogService::create_catalog_filter( + const CatalogID active_catalog_id) const +{ + Set<CatalogID> matching_catalog_ids; + matching_catalog_ids.add(active_catalog_id); + + const AssetCatalog *active_catalog = find_catalog(active_catalog_id); + if (!active_catalog) { + /* If the UUID is unknown (i.e. not mapped to an actual Catalog), it is impossible to determine + * its children. The filter can still work on the given UUID. 
*/ + return AssetCatalogFilter(std::move(matching_catalog_ids)); + } + + /* This cannot just iterate over tree items to get all the required data, because tree items only + * represent single UUIDs. It could be used to get the main UUIDs of the children, though, and + * then only do an exact match on the path (instead of the more complex `is_contained_in()` + * call). Without an extra indexed-by-path acceleration structure, this is still going to require + * a linear search, though. */ + for (const auto &catalog_uptr : this->catalogs_.values()) { + if (catalog_uptr->path.is_contained_in(active_catalog->path)) { + matching_catalog_ids.add(catalog_uptr->catalog_id); + } + } + + return AssetCatalogFilter(std::move(matching_catalog_ids)); +} + +void AssetCatalogService::delete_catalog(CatalogID catalog_id) +{ + std::unique_ptr<AssetCatalog> *catalog_uptr_ptr = this->catalogs_.lookup_ptr(catalog_id); + if (catalog_uptr_ptr == nullptr) { + /* Catalog cannot be found, which is fine. */ + return; + } + + /* Mark the catalog as deleted. */ + AssetCatalog *catalog = catalog_uptr_ptr->get(); + catalog->flags.is_deleted = true; + + /* Move ownership from this->catalogs_ to this->deleted_catalogs_. */ + this->deleted_catalogs_.add(catalog_id, std::move(*catalog_uptr_ptr)); + + /* The catalog can now be removed from the map without freeing the actual AssetCatalog. 
*/ + this->catalogs_.remove(catalog_id); + + this->rebuild_tree(); +} + +void AssetCatalogService::update_catalog_path(CatalogID catalog_id, + const AssetCatalogPath &new_catalog_path) +{ + AssetCatalog *renamed_cat = this->find_catalog(catalog_id); + const AssetCatalogPath old_cat_path = renamed_cat->path; + + for (auto &catalog_uptr : catalogs_.values()) { + AssetCatalog *cat = catalog_uptr.get(); + + const AssetCatalogPath new_path = cat->path.rebase(old_cat_path, new_catalog_path); + if (!new_path) { + continue; + } + cat->path = new_path; + } + + this->rebuild_tree(); +} + +AssetCatalog *AssetCatalogService::create_catalog(const AssetCatalogPath &catalog_path) +{ + std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path(catalog_path); + + /* So we can std::move(catalog) and still use the non-owning pointer: */ + AssetCatalog *const catalog_ptr = catalog.get(); + + /* TODO(@sybren): move the `AssetCatalog::from_path()` function to another place, that can reuse + * catalogs when a catalog with the given path is already known, and avoid duplicate catalog IDs. + */ + BLI_assert_msg(!catalogs_.contains(catalog->catalog_id), "duplicate catalog ID not supported"); + catalogs_.add_new(catalog->catalog_id, std::move(catalog)); + + if (catalog_definition_file_) { + /* Ensure the new catalog gets written to disk at some point. If there is no CDF in memory yet, + * it's enough to have the catalog known to the service as it'll be saved to a new file. 
*/ + catalog_definition_file_->add_new(catalog_ptr); + } + + BLI_assert_msg(catalog_tree_, "An Asset Catalog tree should always exist."); + catalog_tree_->insert_item(*catalog_ptr); + + return catalog_ptr; +} + +static std::string asset_definition_default_file_path_from_dir(StringRef asset_library_root) +{ + char file_path[PATH_MAX]; + BLI_join_dirfile(file_path, + sizeof(file_path), + asset_library_root.data(), + AssetCatalogService::DEFAULT_CATALOG_FILENAME.data()); + return file_path; +} + +void AssetCatalogService::load_from_disk() +{ + load_from_disk(asset_library_root_); +} + +void AssetCatalogService::load_from_disk(const CatalogFilePath &file_or_directory_path) +{ + BLI_stat_t status; + if (BLI_stat(file_or_directory_path.data(), &status) == -1) { + // TODO(@sybren): throw an appropriate exception. + return; + } + + if (S_ISREG(status.st_mode)) { + load_single_file(file_or_directory_path); + } + else if (S_ISDIR(status.st_mode)) { + load_directory_recursive(file_or_directory_path); + } + else { + // TODO(@sybren): throw an appropriate exception. + } + + /* TODO: Should there be a sanitize step? E.g. to remove catalogs with identical paths? */ + + rebuild_tree(); +} + +void AssetCatalogService::load_directory_recursive(const CatalogFilePath &directory_path) +{ + // TODO(@sybren): implement proper multi-file support. For now, just load + // the default file if it is there. + CatalogFilePath file_path = asset_definition_default_file_path_from_dir(directory_path); + + if (!BLI_exists(file_path.data())) { + /* No file to be loaded is perfectly fine. */ + return; + } + + this->load_single_file(file_path); +} + +void AssetCatalogService::load_single_file(const CatalogFilePath &catalog_definition_file_path) +{ + /* TODO(@sybren): check that #catalog_definition_file_path is contained in #asset_library_root_, + * otherwise some assumptions may fail. 
*/ + std::unique_ptr<AssetCatalogDefinitionFile> cdf = parse_catalog_file( + catalog_definition_file_path); + + BLI_assert_msg(!this->catalog_definition_file_, + "Only loading of a single catalog definition file is supported."); + this->catalog_definition_file_ = std::move(cdf); +} + +std::unique_ptr<AssetCatalogDefinitionFile> AssetCatalogService::parse_catalog_file( + const CatalogFilePath &catalog_definition_file_path) +{ + auto cdf = std::make_unique<AssetCatalogDefinitionFile>(); + cdf->file_path = catalog_definition_file_path; + + auto catalog_parsed_callback = [this, catalog_definition_file_path]( + std::unique_ptr<AssetCatalog> catalog) { + if (this->catalogs_.contains(catalog->catalog_id)) { + // TODO(@sybren): apparently another CDF was already loaded. This is not supported yet. + std::cerr << catalog_definition_file_path << ": multiple definitions of catalog " + << catalog->catalog_id << " in multiple files, ignoring this one." << std::endl; + /* Don't store 'catalog'; unique_ptr will free its memory. */ + return false; + } + + /* The AssetCatalog pointer is now owned by the AssetCatalogService. */ + this->catalogs_.add_new(catalog->catalog_id, std::move(catalog)); + return true; + }; + + cdf->parse_catalog_file(cdf->file_path, catalog_parsed_callback); + + return cdf; +} + +void AssetCatalogService::merge_from_disk_before_writing() +{ + /* TODO(Sybren): expand to support multiple CDFs. */ + + if (!catalog_definition_file_ || catalog_definition_file_->file_path.empty() || + !BLI_is_file(catalog_definition_file_->file_path.c_str())) { + return; + } + + auto catalog_parsed_callback = [this](std::unique_ptr<AssetCatalog> catalog) { + const bUUID catalog_id = catalog->catalog_id; + + /* The following two conditions could be or'ed together. Keeping them separated helps when + * adding debug prints, breakpoints, etc. */ + if (this->catalogs_.contains(catalog_id)) { + /* This catalog was already seen, so just ignore it. 
*/ + return false; + } + if (this->deleted_catalogs_.contains(catalog_id)) { + /* This catalog was already seen and subsequently deleted, so just ignore it. */ + return false; + } + + /* This is a new catalog, so let's keep it around. */ + this->catalogs_.add_new(catalog_id, std::move(catalog)); + return true; + }; + + catalog_definition_file_->parse_catalog_file(catalog_definition_file_->file_path, + catalog_parsed_callback); +} + +bool AssetCatalogService::write_to_disk_on_blendfile_save(const CatalogFilePath &blend_file_path) +{ + /* TODO(Sybren): expand to support multiple CDFs. */ + + /* - Already loaded a CDF from disk? -> Always write to that file. */ + if (this->catalog_definition_file_) { + merge_from_disk_before_writing(); + return catalog_definition_file_->write_to_disk(); + } + + if (catalogs_.is_empty() && deleted_catalogs_.is_empty()) { + /* Avoid saving anything, when there is nothing to save. */ + return true; /* Writing nothing when there is nothing to write is still a success. */ + } + + const CatalogFilePath cdf_path_to_write = find_suitable_cdf_path_for_writing(blend_file_path); + this->catalog_definition_file_ = construct_cdf_in_memory(cdf_path_to_write); + merge_from_disk_before_writing(); + return catalog_definition_file_->write_to_disk(); +} + +CatalogFilePath AssetCatalogService::find_suitable_cdf_path_for_writing( + const CatalogFilePath &blend_file_path) +{ + BLI_assert_msg(!blend_file_path.empty(), + "A non-empty .blend file path is required to be able to determine where the " + "catalog definition file should be put"); + + /* Determine the default CDF path in the same directory of the blend file. 
*/ + char blend_dir_path[PATH_MAX]; + BLI_split_dir_part(blend_file_path.c_str(), blend_dir_path, sizeof(blend_dir_path)); + const CatalogFilePath cdf_path_next_to_blend = asset_definition_default_file_path_from_dir( + blend_dir_path); + + if (BLI_exists(cdf_path_next_to_blend.c_str())) { + /* - The directory containing the blend file has a blender_assets.cats.txt file? + * -> Merge with & write to that file. */ + return cdf_path_next_to_blend; + } + + /* - There's no definition file next to the .blend file. + * -> Ask the asset library API for an appropriate location. */ + char suitable_root_path[PATH_MAX]; + BKE_asset_library_find_suitable_root_path_from_path(blend_file_path.c_str(), suitable_root_path); + char asset_lib_cdf_path[PATH_MAX]; + BLI_path_join(asset_lib_cdf_path, + sizeof(asset_lib_cdf_path), + suitable_root_path, + DEFAULT_CATALOG_FILENAME.c_str(), + NULL); + + return asset_lib_cdf_path; +} + +std::unique_ptr<AssetCatalogDefinitionFile> AssetCatalogService::construct_cdf_in_memory( + const CatalogFilePath &file_path) +{ + auto cdf = std::make_unique<AssetCatalogDefinitionFile>(); + cdf->file_path = file_path; + + for (auto &catalog : catalogs_.values()) { + cdf->add_new(catalog.get()); + } + + return cdf; +} + +std::unique_ptr<AssetCatalogTree> AssetCatalogService::read_into_tree() +{ + auto tree = std::make_unique<AssetCatalogTree>(); + + /* Go through the catalogs, insert each path component into the tree where needed. */ + for (auto &catalog : catalogs_.values()) { + tree->insert_item(*catalog); + } + + return tree; +} + +void AssetCatalogService::rebuild_tree() +{ + create_missing_catalogs(); + this->catalog_tree_ = read_into_tree(); +} + +void AssetCatalogService::create_missing_catalogs() +{ + /* Construct an ordered set of paths to check, so that parents are ordered before children. 
*/ + std::set<AssetCatalogPath> paths_to_check; + for (auto &catalog : catalogs_.values()) { + paths_to_check.insert(catalog->path); + } + + std::set<AssetCatalogPath> seen_paths; + /* The empty parent should never be created, so always be considered "seen". */ + seen_paths.insert(AssetCatalogPath("")); + + /* Find and create missing direct parents (so ignoring parents-of-parents). */ + while (!paths_to_check.empty()) { + /* Pop the first path of the queue. */ + const AssetCatalogPath path = *paths_to_check.begin(); + paths_to_check.erase(paths_to_check.begin()); + + if (seen_paths.find(path) != seen_paths.end()) { + /* This path has been seen already, so it can be ignored. */ + continue; + } + seen_paths.insert(path); + + const AssetCatalogPath parent_path = path.parent(); + if (seen_paths.find(parent_path) != seen_paths.end()) { + /* The parent exists, continue to the next path. */ + continue; + } + + /* The parent doesn't exist, so create it and queue it up for checking its parent. */ + create_catalog(parent_path); + paths_to_check.insert(parent_path); + } + + /* TODO(Sybren): bind the newly created catalogs to a CDF, if we know about it. 
*/ +} + +/* ---------------------------------------------------------------------- */ + +AssetCatalogTreeItem::AssetCatalogTreeItem(StringRef name, + CatalogID catalog_id, + const AssetCatalogTreeItem *parent) + : name_(name), catalog_id_(catalog_id), parent_(parent) +{ +} + +CatalogID AssetCatalogTreeItem::get_catalog_id() const +{ + return catalog_id_; +} + +StringRef AssetCatalogTreeItem::get_name() const +{ + return name_; +} + +AssetCatalogPath AssetCatalogTreeItem::catalog_path() const +{ + AssetCatalogPath current_path = name_; + for (const AssetCatalogTreeItem *parent = parent_; parent; parent = parent->parent_) { + current_path = AssetCatalogPath(parent->name_) / current_path; + } + return current_path; +} + +int AssetCatalogTreeItem::count_parents() const +{ + int i = 0; + for (const AssetCatalogTreeItem *parent = parent_; parent; parent = parent->parent_) { + i++; + } + return i; +} + +bool AssetCatalogTreeItem::has_children() const +{ + return !children_.empty(); +} + +/* ---------------------------------------------------------------------- */ + +void AssetCatalogTree::insert_item(const AssetCatalog &catalog) +{ + const AssetCatalogTreeItem *parent = nullptr; + /* The children for the currently iterated component, where the following component should be + * added to (if not there yet). */ + AssetCatalogTreeItem::ChildMap *current_item_children = &root_items_; + + BLI_assert_msg(!ELEM(catalog.path.str()[0], '/', '\\'), + "Malformed catalog path; should not start with a separator"); + + const CatalogID nil_id{}; + + catalog.path.iterate_components([&](StringRef component_name, const bool is_last_component) { + /* Insert new tree element - if no matching one is there yet! */ + auto [key_and_item, was_inserted] = current_item_children->emplace( + component_name, + AssetCatalogTreeItem( + component_name, is_last_component ? 
catalog.catalog_id : nil_id, parent)); + AssetCatalogTreeItem &item = key_and_item->second; + + /* If full path of this catalog already exists as parent path of a previously read catalog, + * we can ensure this tree item's UUID is set here. */ + if (is_last_component && BLI_uuid_is_nil(item.catalog_id_)) { + item.catalog_id_ = catalog.catalog_id; + } + + /* Walk further into the path (no matter if a new item was created or not). */ + parent = &item; + current_item_children = &item.children_; + }); +} + +void AssetCatalogTree::foreach_item(AssetCatalogTreeItem::ItemIterFn callback) +{ + AssetCatalogTreeItem::foreach_item_recursive(root_items_, callback); +} + +void AssetCatalogTreeItem::foreach_item_recursive(AssetCatalogTreeItem::ChildMap &children, + const ItemIterFn callback) +{ + for (auto &[key, item] : children) { + callback(item); + foreach_item_recursive(item.children_, callback); + } +} + +void AssetCatalogTree::foreach_root_item(const ItemIterFn callback) +{ + for (auto &[key, item] : root_items_) { + callback(item); + } +} + +void AssetCatalogTreeItem::foreach_child(const ItemIterFn callback) +{ + for (auto &[key, item] : children_) { + callback(item); + } +} + +AssetCatalogTree *AssetCatalogService::get_catalog_tree() +{ + return catalog_tree_.get(); +} + +bool AssetCatalogDefinitionFile::contains(const CatalogID catalog_id) const +{ + return catalogs_.contains(catalog_id); +} + +void AssetCatalogDefinitionFile::add_new(AssetCatalog *catalog) +{ + catalogs_.add_new(catalog->catalog_id, catalog); +} + +void AssetCatalogDefinitionFile::parse_catalog_file( + const CatalogFilePath &catalog_definition_file_path, + AssetCatalogParsedFn catalog_loaded_callback) +{ + std::fstream infile(catalog_definition_file_path); + + bool seen_version_number = false; + std::string line; + while (std::getline(infile, line)) { + const StringRef trimmed_line = StringRef(line).trim(); + if (trimmed_line.is_empty() || trimmed_line[0] == '#') { + continue; + } + + if 
(!seen_version_number) { + /* The very first non-ignored line should be the version declaration. */ + const bool is_valid_version = this->parse_version_line(trimmed_line); + if (!is_valid_version) { + std::cerr << catalog_definition_file_path + << ": first line should be version declaration; ignoring file." << std::endl; + break; + } + seen_version_number = true; + continue; + } + + std::unique_ptr<AssetCatalog> catalog = this->parse_catalog_line(trimmed_line); + if (!catalog) { + continue; + } + + AssetCatalog *non_owning_ptr = catalog.get(); + const bool keep_catalog = catalog_loaded_callback(std::move(catalog)); + if (!keep_catalog) { + continue; + } + + if (this->contains(non_owning_ptr->catalog_id)) { + std::cerr << catalog_definition_file_path << ": multiple definitions of catalog " + << non_owning_ptr->catalog_id << " in the same file, using first occurrence." + << std::endl; + /* Don't store 'catalog'; unique_ptr will free its memory. */ + continue; + } + + /* The AssetDefinitionFile should include this catalog when writing it back to disk. */ + this->add_new(non_owning_ptr); + } +} + +bool AssetCatalogDefinitionFile::parse_version_line(const StringRef line) +{ + if (!line.startswith(VERSION_MARKER)) { + return false; + } + + const std::string version_string = line.substr(VERSION_MARKER.length()); + const int file_version = std::atoi(version_string.c_str()); + + /* No versioning, just a blunt check whether it's the right one. */ + return file_version == SUPPORTED_VERSION; +} + +std::unique_ptr<AssetCatalog> AssetCatalogDefinitionFile::parse_catalog_line(const StringRef line) +{ + const char delim = ':'; + const int64_t first_delim = line.find_first_of(delim); + if (first_delim == StringRef::not_found) { + std::cerr << "Invalid catalog line in " << this->file_path << ": " << line << std::endl; + return std::unique_ptr<AssetCatalog>(nullptr); + } + + /* Parse the catalog ID. 
*/ + const std::string id_as_string = line.substr(0, first_delim).trim(); + bUUID catalog_id; + const bool uuid_parsed_ok = BLI_uuid_parse_string(&catalog_id, id_as_string.c_str()); + if (!uuid_parsed_ok) { + std::cerr << "Invalid UUID in " << this->file_path << ": " << line << std::endl; + return std::unique_ptr<AssetCatalog>(nullptr); + } + + /* Parse the path and simple name. */ + const StringRef path_and_simple_name = line.substr(first_delim + 1); + const int64_t second_delim = path_and_simple_name.find_first_of(delim); + + std::string path_in_file; + std::string simple_name; + if (second_delim == 0) { + /* Delimiter as first character means there is no path. These lines are to be ignored. */ + return std::unique_ptr<AssetCatalog>(nullptr); + } + + if (second_delim == StringRef::not_found) { + /* No delimiter means no simple name, just treat it as all "path". */ + path_in_file = path_and_simple_name; + simple_name = ""; + } + else { + path_in_file = path_and_simple_name.substr(0, second_delim); + simple_name = path_and_simple_name.substr(second_delim + 1).trim(); + } + + AssetCatalogPath catalog_path = path_in_file; + return std::make_unique<AssetCatalog>(catalog_id, catalog_path.cleanup(), simple_name); +} + +bool AssetCatalogDefinitionFile::write_to_disk() const +{ + BLI_assert_msg(!this->file_path.empty(), "Writing to CDF requires its file path to be known"); + return this->write_to_disk(this->file_path); +} + +bool AssetCatalogDefinitionFile::write_to_disk(const CatalogFilePath &dest_file_path) const +{ + const CatalogFilePath writable_path = dest_file_path + ".writing"; + const CatalogFilePath backup_path = dest_file_path + "~"; + + if (!this->write_to_disk_unsafe(writable_path)) { + /* TODO: communicate what went wrong. */ + return false; + } + if (BLI_exists(dest_file_path.c_str())) { + if (BLI_rename(dest_file_path.c_str(), backup_path.c_str())) { + /* TODO: communicate what went wrong. 
*/ + return false; + } + } + if (BLI_rename(writable_path.c_str(), dest_file_path.c_str())) { + /* TODO: communicate what went wrong. */ + return false; + } + + return true; +} + +bool AssetCatalogDefinitionFile::write_to_disk_unsafe(const CatalogFilePath &dest_file_path) const +{ + char directory[PATH_MAX]; + BLI_split_dir_part(dest_file_path.c_str(), directory, sizeof(directory)); + if (!ensure_directory_exists(directory)) { + /* TODO(Sybren): pass errors to the UI somehow. */ + return false; + } + + std::ofstream output(dest_file_path); + + // TODO(@sybren): remember the line ending style that was originally read, then use that to write + // the file again. + + // Write the header. + output << HEADER; + output << "" << std::endl; + output << VERSION_MARKER << SUPPORTED_VERSION << std::endl; + output << "" << std::endl; + + // Write the catalogs, ordered by path (primary) and UUID (secondary). + AssetCatalogOrderedSet catalogs_by_path; + for (const AssetCatalog *catalog : catalogs_.values()) { + if (catalog->flags.is_deleted) { + continue; + } + catalogs_by_path.insert(catalog); + } + + for (const AssetCatalog *catalog : catalogs_by_path) { + output << catalog->catalog_id << ":" << catalog->path << ":" << catalog->simple_name + << std::endl; + } + output.close(); + return !output.bad(); +} + +bool AssetCatalogDefinitionFile::ensure_directory_exists( + const CatalogFilePath directory_path) const +{ + /* TODO(@sybren): design a way to get such errors presented to users (or ensure that they never + * occur). */ + if (directory_path.empty()) { + std::cerr + << "AssetCatalogService: no asset library root configured, unable to ensure it exists." + << std::endl; + return false; + } + + if (BLI_exists(directory_path.data())) { + if (!BLI_is_dir(directory_path.data())) { + std::cerr << "AssetCatalogService: " << directory_path + << " exists but is not a directory, this is not a supported situation." 
+ << std::endl; + return false; + } + + /* Root directory exists, work is done. */ + return true; + } + + /* Ensure the root directory exists. */ + std::error_code err_code; + if (!BLI_dir_create_recursive(directory_path.data())) { + std::cerr << "AssetCatalogService: error creating directory " << directory_path << ": " + << err_code << std::endl; + return false; + } + + /* Root directory has been created, work is done. */ + return true; +} + +AssetCatalog::AssetCatalog(const CatalogID catalog_id, + const AssetCatalogPath &path, + const std::string &simple_name) + : catalog_id(catalog_id), path(path), simple_name(simple_name) +{ +} + +std::unique_ptr<AssetCatalog> AssetCatalog::from_path(const AssetCatalogPath &path) +{ + const AssetCatalogPath clean_path = path.cleanup(); + const CatalogID cat_id = BLI_uuid_generate_random(); + const std::string simple_name = sensible_simple_name_for_path(clean_path); + auto catalog = std::make_unique<AssetCatalog>(cat_id, clean_path, simple_name); + return catalog; +} + +std::string AssetCatalog::sensible_simple_name_for_path(const AssetCatalogPath &path) +{ + std::string name = path.str(); + std::replace(name.begin(), name.end(), AssetCatalogPath::SEPARATOR, '-'); + if (name.length() < MAX_NAME - 1) { + return name; + } + + /* Trim off the start of the path, as that's the most generic part and thus contains the least + * information. */ + return "..." 
+ name.substr(name.length() - 60); +} + +AssetCatalogFilter::AssetCatalogFilter(Set<CatalogID> &&matching_catalog_ids) + : matching_catalog_ids(std::move(matching_catalog_ids)) +{ +} + +bool AssetCatalogFilter::contains(const CatalogID asset_catalog_id) const +{ + return matching_catalog_ids.contains(asset_catalog_id); +} + +} // namespace blender::bke diff --git a/source/blender/blenkernel/intern/asset_catalog_path.cc b/source/blender/blenkernel/intern/asset_catalog_path.cc new file mode 100644 index 00000000000..85b8969cb8c --- /dev/null +++ b/source/blender/blenkernel/intern/asset_catalog_path.cc @@ -0,0 +1,228 @@ +/* + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ */ + +/** \file + * \ingroup bke + */ + +#include "BKE_asset_catalog_path.hh" + +#include "BLI_path_util.h" + +namespace blender::bke { + +const char AssetCatalogPath::SEPARATOR = '/'; + +AssetCatalogPath::AssetCatalogPath(const std::string &path) : path_(path) +{ +} + +AssetCatalogPath::AssetCatalogPath(StringRef path) : path_(path) +{ +} + +AssetCatalogPath::AssetCatalogPath(const char *path) : path_(path) +{ +} + +AssetCatalogPath::AssetCatalogPath(AssetCatalogPath &&other_path) noexcept + : path_(std::move(other_path.path_)) +{ +} + +uint64_t AssetCatalogPath::hash() const +{ + std::hash<std::string> hasher{}; + return hasher(this->path_); +} + +uint64_t AssetCatalogPath::length() const +{ + return this->path_.length(); +} + +const char *AssetCatalogPath::c_str() const +{ + return this->path_.c_str(); +} + +const std::string &AssetCatalogPath::str() const +{ + return this->path_; +} + +/* In-class operators, because of the implicit `AssetCatalogPath(StringRef)` constructor. + * Otherwise `string == string` could cast both sides to `AssetCatalogPath`. 
*/ +bool AssetCatalogPath::operator==(const AssetCatalogPath &other_path) const +{ + return this->path_ == other_path.path_; +} + +bool AssetCatalogPath::operator!=(const AssetCatalogPath &other_path) const +{ + return !(*this == other_path); +} + +bool AssetCatalogPath::operator<(const AssetCatalogPath &other_path) const +{ + return this->path_ < other_path.path_; +} + +AssetCatalogPath AssetCatalogPath::operator/(const AssetCatalogPath &path_to_append) const +{ + /* `"" / "path"` or `"path" / ""` should just result in `"path"` */ + if (!*this) { + return path_to_append; + } + if (!path_to_append) { + return *this; + } + + std::stringstream new_path; + new_path << this->path_ << SEPARATOR << path_to_append.path_; + return AssetCatalogPath(new_path.str()); +} + +AssetCatalogPath::operator bool() const +{ + return !this->path_.empty(); +} + +std::ostream &operator<<(std::ostream &stream, const AssetCatalogPath &path_to_append) +{ + stream << path_to_append.path_; + return stream; +} + +AssetCatalogPath AssetCatalogPath::cleanup() const +{ + std::stringstream clean_components; + bool first_component_seen = false; + + this->iterate_components([&clean_components, &first_component_seen](StringRef component_name, + bool /*is_last_component*/) { + const std::string clean_component = cleanup_component(component_name); + + if (clean_component.empty()) { + /* These are caused by leading, trailing, or double slashes. */ + return; + } + + /* If a previous path component has been streamed already, we need a path separator. This + * cannot use the `is_last_component` boolean, because the last component might be skipped due + * to the condition above. 
*/ + if (first_component_seen) { + clean_components << SEPARATOR; + } + first_component_seen = true; + + clean_components << clean_component; + }); + + return AssetCatalogPath(clean_components.str()); +} + +std::string AssetCatalogPath::cleanup_component(StringRef component) +{ + std::string cleaned = component.trim(); + /* Replace colons with something else, as those are used in the CDF file as delimiter. */ + std::replace(cleaned.begin(), cleaned.end(), ':', '-'); + return cleaned; +} + +bool AssetCatalogPath::is_contained_in(const AssetCatalogPath &other_path) const +{ + if (!other_path) { + /* The empty path contains all other paths. */ + return true; + } + + if (this->path_ == other_path.path_) { + /* Weak is-in relation: equal paths contain each other. */ + return true; + } + + /* To be a child path of 'other_path', our path must be at least a separator and another + * character longer. */ + if (this->length() < other_path.length() + 2) { + return false; + } + + /* Create StringRef to be able to use .startswith(). */ + const StringRef this_path(this->path_); + const bool prefix_ok = this_path.startswith(other_path.path_); + const char next_char = this_path[other_path.length()]; + return prefix_ok && next_char == SEPARATOR; +} + +AssetCatalogPath AssetCatalogPath::parent() const +{ + if (!*this) { + return AssetCatalogPath(""); + } + std::string::size_type last_sep_index = this->path_.rfind(SEPARATOR); + if (last_sep_index == std::string::npos) { + return AssetCatalogPath(""); + } + return AssetCatalogPath(this->path_.substr(0, last_sep_index)); +} + +void AssetCatalogPath::iterate_components(ComponentIteratorFn callback) const +{ + const char *next_slash_ptr; + + for (const char *path_component = this->path_.data(); path_component && path_component[0]; + /* Jump to one after the next slash if there is any. */ + path_component = next_slash_ptr ? 
next_slash_ptr + 1 : nullptr) { + next_slash_ptr = BLI_path_slash_find(path_component); + + const bool is_last_component = next_slash_ptr == nullptr; + /* Note that this won't be null terminated. */ + const StringRef component_name = is_last_component ? + path_component : + StringRef(path_component, + next_slash_ptr - path_component); + + callback(component_name, is_last_component); + } +} + +AssetCatalogPath AssetCatalogPath::rebase(const AssetCatalogPath &from_path, + const AssetCatalogPath &to_path) const +{ + if (!from_path) { + if (!to_path) { + return AssetCatalogPath(""); + } + return to_path / *this; + } + + if (!this->is_contained_in(from_path)) { + return AssetCatalogPath(""); + } + + if (*this == from_path) { + /* Early return, because otherwise the length+1 below is going to cause problems. */ + return to_path; + } + + /* When from_path = "test", we need to skip "test/" to get the rest of the path, hence the +1. */ + const StringRef suffix = StringRef(this->path_).substr(from_path.length() + 1); + const AssetCatalogPath path_suffix(suffix); + return to_path / path_suffix; +} + +} // namespace blender::bke diff --git a/source/blender/blenkernel/intern/asset_catalog_path_test.cc b/source/blender/blenkernel/intern/asset_catalog_path_test.cc new file mode 100644 index 00000000000..af15cbf405a --- /dev/null +++ b/source/blender/blenkernel/intern/asset_catalog_path_test.cc @@ -0,0 +1,251 @@ +/* + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * The Original Code is Copyright (C) 2020 Blender Foundation
 * All rights reserved.
 */

#include "BKE_asset_catalog_path.hh"

#include "BLI_set.hh"
#include "BLI_vector.hh"

#include <set>
#include <sstream>

#include "testing/testing.h"

namespace blender::bke::tests {

/* AssetCatalogPath should be constructible from C string literals, std::string (const and
 * non-const), and StringRef (including a sub-view of a larger string). */
TEST(AssetCatalogPathTest, construction)
{
  AssetCatalogPath from_char_literal("the/path");

  const std::string str_const = "the/path";
  AssetCatalogPath from_string_constant(str_const);

  std::string str_variable = "the/path";
  AssetCatalogPath from_string_variable(str_variable);

  std::string long_string = "this is a long/string/with/a/path in the middle";
  StringRef long_string_ref(long_string);
  StringRef middle_bit = long_string_ref.substr(10, 23);
  AssetCatalogPath from_string_ref(middle_bit);
  EXPECT_EQ(from_string_ref, "long/string/with/a/path");
}

/* length() counts bytes, not Unicode code points. */
TEST(AssetCatalogPathTest, length)
{
  const AssetCatalogPath one("1");
  EXPECT_EQ(1, one.length());

  const AssetCatalogPath empty("");
  EXPECT_EQ(0, empty.length());

  const AssetCatalogPath utf8("some/родитель");
  EXPECT_EQ(21, utf8.length()) << "13 characters should be 21 bytes.";
}

TEST(AssetCatalogPathTest, comparison_operators)
{
  const AssetCatalogPath empty("");
  const AssetCatalogPath the_path("the/path");
  const AssetCatalogPath the_path_child("the/path/child");
  const AssetCatalogPath unrelated_path("unrelated/path");
  const AssetCatalogPath other_instance_same_path("the/path");

  EXPECT_LT(empty, the_path);
  EXPECT_LT(the_path, the_path_child);
  EXPECT_LT(the_path, unrelated_path);

  EXPECT_EQ(empty, empty) << "Identical empty instances should compare equal.";
  EXPECT_EQ(empty, "") << "Comparison to empty string should be possible.";
  EXPECT_EQ(the_path, the_path) << "Identical non-empty instances should compare equal.";
  EXPECT_EQ(the_path, "the/path") << "Comparison to string should be possible.";
  EXPECT_EQ(the_path, other_instance_same_path)
      << "Different instances with equal path should compare equal.";

  EXPECT_NE(the_path, the_path_child);
  EXPECT_NE(the_path, unrelated_path);
  EXPECT_NE(the_path, empty);

  EXPECT_FALSE(empty);
  EXPECT_TRUE(the_path);
}

/* NOTE(review): this assumes a moved-from path converts to `false`, i.e. that the move leaves
 * the source empty — confirm this is part of AssetCatalogPath's documented contract. */
TEST(AssetCatalogPathTest, move_semantics)
{
  AssetCatalogPath source_path("source/path");
  EXPECT_TRUE(source_path);

  AssetCatalogPath dest_path = std::move(source_path);
  EXPECT_FALSE(source_path);
  EXPECT_TRUE(dest_path);
}

TEST(AssetCatalogPathTest, concatenation)
{
  AssetCatalogPath some_parent("some/родитель");
  AssetCatalogPath child = some_parent / "ребенок";

  EXPECT_EQ(some_parent, "some/родитель")
      << "Appending a child path should not modify the parent.";
  EXPECT_EQ(child, "some/родитель/ребенок");

  AssetCatalogPath appended_compound_path = some_parent / "ребенок/внук";
  EXPECT_EQ(appended_compound_path, "some/родитель/ребенок/внук");

  AssetCatalogPath empty("");
  AssetCatalogPath child_of_the_void = empty / "child";
  EXPECT_EQ(child_of_the_void, "child")
      << "Appending to an empty path should not create an initial slash.";

  AssetCatalogPath parent_of_the_void = some_parent / empty;
  EXPECT_EQ(parent_of_the_void, "some/родитель")
      << "Prepending to an empty path should not create a trailing slash.";

  std::string subpath = "child";
  AssetCatalogPath concatenated_with_string = some_parent / subpath;
  EXPECT_EQ(concatenated_with_string, "some/родитель/child");
}

/* Paths should be usable as keys in both std:: and blender:: set types. */
TEST(AssetCatalogPathTest, hashable)
{
  AssetCatalogPath path("heyyyyy");

  std::set<AssetCatalogPath> path_std_set;
  path_std_set.insert(path);

  blender::Set<AssetCatalogPath> path_blender_set;
  path_blender_set.add(path);
}

TEST(AssetCatalogPathTest, stream_operator)
{
  AssetCatalogPath path("путь/в/Пермь");
  std::stringstream sstream;
  sstream << path;
  EXPECT_EQ("путь/в/Пермь", sstream.str());
}

TEST(AssetCatalogPathTest, is_contained_in)
{
  const AssetCatalogPath catpath("simple/path/child");
  EXPECT_FALSE(catpath.is_contained_in("unrelated"));
  EXPECT_FALSE(catpath.is_contained_in("sim"));
  EXPECT_FALSE(catpath.is_contained_in("simple/pathx"));
  EXPECT_FALSE(catpath.is_contained_in("simple/path/c"));
  EXPECT_FALSE(catpath.is_contained_in("simple/path/child/grandchild"));
  EXPECT_FALSE(catpath.is_contained_in("simple/path/"))
      << "Non-normalized paths are not expected to work.";

  EXPECT_TRUE(catpath.is_contained_in(""));
  EXPECT_TRUE(catpath.is_contained_in("simple"));
  EXPECT_TRUE(catpath.is_contained_in("simple/path"));

  /* Test with some UTF8 non-ASCII characters. */
  AssetCatalogPath some_parent("some/родитель");
  AssetCatalogPath child = some_parent / "ребенок";

  EXPECT_TRUE(child.is_contained_in(some_parent));
  EXPECT_TRUE(child.is_contained_in("some"));

  AssetCatalogPath appended_compound_path = some_parent / "ребенок/внук";
  EXPECT_TRUE(appended_compound_path.is_contained_in(some_parent));
  EXPECT_TRUE(appended_compound_path.is_contained_in(child));

  /* Test "going up" directory-style. */
  AssetCatalogPath child_with_dotdot = some_parent / "../../other/hierarchy/part";
  EXPECT_TRUE(child_with_dotdot.is_contained_in(some_parent))
      << "dotdot path components should have no meaning";
}

TEST(AssetCatalogPathTest, cleanup)
{
  AssetCatalogPath ugly_path("/ some / родитель / ");
  AssetCatalogPath clean_path = ugly_path.cleanup();

  EXPECT_EQ(AssetCatalogPath("/ some / родитель / "), ugly_path)
      << "cleanup should not modify the path instance itself";

  EXPECT_EQ(AssetCatalogPath("some/родитель"), clean_path);

  AssetCatalogPath double_slashed("some//родитель");
  EXPECT_EQ(AssetCatalogPath("some/родитель"), double_slashed.cleanup());

  AssetCatalogPath with_colons("some/key:subkey=value/path");
  EXPECT_EQ(AssetCatalogPath("some/key-subkey=value/path"), with_colons.cleanup());
}

/* Components must be visited in order, with `is_last_component` set only on the final one. */
TEST(AssetCatalogPathTest, iterate_components)
{
  AssetCatalogPath path("путь/в/Пермь");
  Vector<std::pair<std::string, bool>> seen_components;

  path.iterate_components([&seen_components](StringRef component_name, bool is_last_component) {
    std::pair<std::string, bool> parameter_pair = std::make_pair<std::string, bool>(
        component_name, bool(is_last_component));
    seen_components.append(parameter_pair);
  });

  ASSERT_EQ(3, seen_components.size());

  EXPECT_EQ("путь", seen_components[0].first);
  EXPECT_EQ("в", seen_components[1].first);
  EXPECT_EQ("Пермь", seen_components[2].first);

  EXPECT_FALSE(seen_components[0].second);
  EXPECT_FALSE(seen_components[1].second);
  EXPECT_TRUE(seen_components[2].second);
}

TEST(AssetCatalogPathTest, rebase)
{
  AssetCatalogPath path("some/path/to/some/catalog");
  EXPECT_EQ(path.rebase("some/path", "new/base"), "new/base/to/some/catalog");
  EXPECT_EQ(path.rebase("", "new/base"), "new/base/some/path/to/some/catalog");

  EXPECT_EQ(path.rebase("some/path/to/some/catalog", "some/path/to/some/catalog"),
            "some/path/to/some/catalog")
      << "Rebasing to itself should not change the path.";

  EXPECT_EQ(path.rebase("path/to", "new/base"), "")
      << "Non-matching base path should return empty string to indicate 'NO'.";

  /* Empty strings should be handled without crashing or other nasty side-effects. */
  AssetCatalogPath empty("");
  EXPECT_EQ(empty.rebase("path/to", "new/base"), "");
  EXPECT_EQ(empty.rebase("", "new/base"), "new/base");
  EXPECT_EQ(empty.rebase("", ""), "");
}

TEST(AssetCatalogPathTest, parent)
{
  const AssetCatalogPath ascii_path("path/with/missing/parents");
  EXPECT_EQ(ascii_path.parent(), "path/with/missing");

  const AssetCatalogPath path("путь/в/Пермь/долог/и/далек");
  EXPECT_EQ(path.parent(), "путь/в/Пермь/долог/и");
  EXPECT_EQ(path.parent().parent(), "путь/в/Пермь/долог");
  EXPECT_EQ(path.parent().parent().parent(), "путь/в/Пермь");

  const AssetCatalogPath one_level("one");
  EXPECT_EQ(one_level.parent(), "");

  const AssetCatalogPath empty("");
  EXPECT_EQ(empty.parent(), "");
}

}  // namespace blender::bke::tests
diff --git a/source/blender/blenkernel/intern/asset_catalog_test.cc b/source/blender/blenkernel/intern/asset_catalog_test.cc
new file mode 100644
index 00000000000..fb471a8ee7b
--- /dev/null
+++ b/source/blender/blenkernel/intern/asset_catalog_test.cc
@@ -0,0 +1,965 @@
/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * + * The Original Code is Copyright (C) 2020 Blender Foundation + * All rights reserved. + */ + +#include "BKE_appdir.h" +#include "BKE_asset_catalog.hh" +#include "BKE_preferences.h" + +#include "BLI_fileops.h" +#include "BLI_path_util.h" + +#include "DNA_userdef_types.h" + +#include "testing/testing.h" + +namespace blender::bke::tests { + +/* UUIDs from lib/tests/asset_library/blender_assets.cats.txt */ +const bUUID UUID_ID_WITHOUT_PATH("e34dd2c5-5d2e-4668-9794-1db5de2a4f71"); +const bUUID UUID_POSES_ELLIE("df60e1f6-2259-475b-93d9-69a1b4a8db78"); +const bUUID UUID_POSES_ELLIE_WHITESPACE("b06132f6-5687-4751-a6dd-392740eb3c46"); +const bUUID UUID_POSES_ELLIE_TRAILING_SLASH("3376b94b-a28d-4d05-86c1-bf30b937130d"); +const bUUID UUID_POSES_RUZENA("79a4f887-ab60-4bd4-94da-d572e27d6aed"); +const bUUID UUID_POSES_RUZENA_HAND("81811c31-1a88-4bd7-bb34-c6fc2607a12e"); +const bUUID UUID_POSES_RUZENA_FACE("82162c1f-06cc-4d91-a9bf-4f72c104e348"); +const bUUID UUID_WITHOUT_SIMPLENAME("d7916a31-6ca9-4909-955f-182ca2b81fa3"); + +/* UUIDs from lib/tests/asset_library/modified_assets.cats.txt */ +const bUUID UUID_AGENT_47("c5744ba5-43f5-4f73-8e52-010ad4a61b34"); + +/* Subclass that adds accessors such that protected fields can be used in tests. 
*/ +class TestableAssetCatalogService : public AssetCatalogService { + public: + TestableAssetCatalogService() = default; + + explicit TestableAssetCatalogService(const CatalogFilePath &asset_library_root) + : AssetCatalogService(asset_library_root) + { + } + + AssetCatalogDefinitionFile *get_catalog_definition_file() + { + return catalog_definition_file_.get(); + } + + void create_missing_catalogs() + { + AssetCatalogService::create_missing_catalogs(); + } + + int64_t count_catalogs_with_path(const CatalogFilePath &path) + { + int64_t count = 0; + for (auto &catalog_uptr : catalogs_.values()) { + if (catalog_uptr->path == path) { + count++; + } + } + return count; + } +}; + +class AssetCatalogTest : public testing::Test { + protected: + CatalogFilePath asset_library_root_; + CatalogFilePath temp_library_path_; + + void SetUp() override + { + const std::string test_files_dir = blender::tests::flags_test_asset_dir(); + if (test_files_dir.empty()) { + FAIL(); + } + + asset_library_root_ = test_files_dir + "/" + "asset_library"; + temp_library_path_ = ""; + } + + /* Register a temporary path, which will be removed at the end of the test. + * The returned path ends in a slash. */ + CatalogFilePath use_temp_path() + { + BKE_tempdir_init(""); + const CatalogFilePath tempdir = BKE_tempdir_session(); + temp_library_path_ = tempdir + "test-temporary-path/"; + return temp_library_path_; + } + + CatalogFilePath create_temp_path() + { + CatalogFilePath path = use_temp_path(); + BLI_dir_create_recursive(path.c_str()); + return path; + } + + struct CatalogPathInfo { + StringRef name; + int parent_count; + }; + + void assert_expected_item(const CatalogPathInfo &expected_path, + const AssetCatalogTreeItem &actual_item) + { + char expected_filename[FILE_MAXFILE]; + /* Is the catalog name as expected? "character", "Ellie", ... 
*/ + BLI_split_file_part(expected_path.name.data(), expected_filename, sizeof(expected_filename)); + EXPECT_EQ(expected_filename, actual_item.get_name()); + /* Does the computed number of parents match? */ + EXPECT_EQ(expected_path.parent_count, actual_item.count_parents()); + EXPECT_EQ(expected_path.name, actual_item.catalog_path().str()); + } + + /** + * Recursively iterate over all tree items using #AssetCatalogTree::foreach_item() and check if + * the items map exactly to \a expected_paths. + */ + void assert_expected_tree_items(AssetCatalogTree *tree, + const std::vector<CatalogPathInfo> &expected_paths) + { + int i = 0; + tree->foreach_item([&](const AssetCatalogTreeItem &actual_item) { + ASSERT_LT(i, expected_paths.size()) + << "More catalogs in tree than expected; did not expect " << actual_item.catalog_path(); + assert_expected_item(expected_paths[i], actual_item); + i++; + }); + } + + /** + * Iterate over the root items of \a tree and check if the items map exactly to \a + * expected_paths. Similar to #assert_expected_tree_items() but calls + * #AssetCatalogTree::foreach_root_item() instead of #AssetCatalogTree::foreach_item(). + */ + void assert_expected_tree_root_items(AssetCatalogTree *tree, + const std::vector<CatalogPathInfo> &expected_paths) + { + int i = 0; + tree->foreach_root_item([&](const AssetCatalogTreeItem &actual_item) { + ASSERT_LT(i, expected_paths.size()) + << "More catalogs in tree root than expected; did not expect " + << actual_item.catalog_path(); + assert_expected_item(expected_paths[i], actual_item); + i++; + }); + } + + /** + * Iterate over the child items of \a parent_item and check if the items map exactly to \a + * expected_paths. Similar to #assert_expected_tree_items() but calls + * #AssetCatalogTreeItem::foreach_child() instead of #AssetCatalogTree::foreach_item(). 
+ */ + void assert_expected_tree_item_child_items(AssetCatalogTreeItem *parent_item, + const std::vector<CatalogPathInfo> &expected_paths) + { + int i = 0; + parent_item->foreach_child([&](const AssetCatalogTreeItem &actual_item) { + ASSERT_LT(i, expected_paths.size()) + << "More catalogs in tree item than expected; did not expect " + << actual_item.catalog_path(); + assert_expected_item(expected_paths[i], actual_item); + i++; + }); + } + + void TearDown() override + { + if (!temp_library_path_.empty()) { + BLI_delete(temp_library_path_.c_str(), true, true); + temp_library_path_ = ""; + } + } +}; + +TEST_F(AssetCatalogTest, load_single_file) +{ + AssetCatalogService service(asset_library_root_); + service.load_from_disk(asset_library_root_ + "/" + "blender_assets.cats.txt"); + + /* Test getting a non-existent catalog ID. */ + EXPECT_EQ(nullptr, service.find_catalog(BLI_uuid_generate_random())); + + /* Test getting an invalid catalog (without path definition). */ + AssetCatalog *cat_without_path = service.find_catalog(UUID_ID_WITHOUT_PATH); + ASSERT_EQ(nullptr, cat_without_path); + + /* Test getting a regular catalog. */ + AssetCatalog *poses_ellie = service.find_catalog(UUID_POSES_ELLIE); + ASSERT_NE(nullptr, poses_ellie); + EXPECT_EQ(UUID_POSES_ELLIE, poses_ellie->catalog_id); + EXPECT_EQ("character/Ellie/poselib", poses_ellie->path.str()); + EXPECT_EQ("POSES_ELLIE", poses_ellie->simple_name); + + /* Test white-space stripping and support in the path. */ + AssetCatalog *poses_whitespace = service.find_catalog(UUID_POSES_ELLIE_WHITESPACE); + ASSERT_NE(nullptr, poses_whitespace); + EXPECT_EQ(UUID_POSES_ELLIE_WHITESPACE, poses_whitespace->catalog_id); + EXPECT_EQ("character/Ellie/poselib/white space", poses_whitespace->path.str()); + EXPECT_EQ("POSES_ELLIE WHITESPACE", poses_whitespace->simple_name); + + /* Test getting a UTF-8 catalog ID. 
*/ + AssetCatalog *poses_ruzena = service.find_catalog(UUID_POSES_RUZENA); + ASSERT_NE(nullptr, poses_ruzena); + EXPECT_EQ(UUID_POSES_RUZENA, poses_ruzena->catalog_id); + EXPECT_EQ("character/Ružena/poselib", poses_ruzena->path.str()); + EXPECT_EQ("POSES_RUŽENA", poses_ruzena->simple_name); +} + +TEST_F(AssetCatalogTest, insert_item_into_tree) +{ + { + AssetCatalogTree tree; + std::unique_ptr<AssetCatalog> catalog_empty_path = AssetCatalog::from_path(""); + tree.insert_item(*catalog_empty_path); + + assert_expected_tree_items(&tree, {}); + } + + { + AssetCatalogTree tree; + + std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("item"); + tree.insert_item(*catalog); + assert_expected_tree_items(&tree, {{"item", 0}}); + + /* Insert child after parent already exists. */ + std::unique_ptr<AssetCatalog> child_catalog = AssetCatalog::from_path("item/child"); + tree.insert_item(*catalog); + assert_expected_tree_items(&tree, {{"item", 0}, {"item/child", 1}}); + + std::vector<CatalogPathInfo> expected_paths; + + /* Test inserting multi-component sub-path. 
*/ + std::unique_ptr<AssetCatalog> grandgrandchild_catalog = AssetCatalog::from_path( + "item/child/grandchild/grandgrandchild"); + tree.insert_item(*catalog); + expected_paths = {{"item", 0}, + {"item/child", 1}, + {"item/child/grandchild", 2}, + {"item/child/grandchild/grandgrandchild", 3}}; + assert_expected_tree_items(&tree, expected_paths); + + std::unique_ptr<AssetCatalog> root_level_catalog = AssetCatalog::from_path("root level"); + tree.insert_item(*catalog); + expected_paths = {{"item", 0}, + {"item/child", 1}, + {"item/child/grandchild", 2}, + {"item/child/grandchild/grandgrandchild", 3}, + {"root level", 0}}; + assert_expected_tree_items(&tree, expected_paths); + } + + { + AssetCatalogTree tree; + + std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("item/child"); + tree.insert_item(*catalog); + assert_expected_tree_items(&tree, {{"item", 0}, {"item/child", 1}}); + } + + { + AssetCatalogTree tree; + + std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("white space"); + tree.insert_item(*catalog); + assert_expected_tree_items(&tree, {{"white space", 0}}); + } + + { + AssetCatalogTree tree; + + std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("/item/white space"); + tree.insert_item(*catalog); + assert_expected_tree_items(&tree, {{"item", 0}, {"item/white space", 1}}); + } + + { + AssetCatalogTree tree; + + std::unique_ptr<AssetCatalog> catalog_unicode_path = AssetCatalog::from_path("Ružena"); + tree.insert_item(*catalog_unicode_path); + assert_expected_tree_items(&tree, {{"Ružena", 0}}); + + catalog_unicode_path = AssetCatalog::from_path("Ružena/Ružena"); + tree.insert_item(*catalog_unicode_path); + assert_expected_tree_items(&tree, {{"Ružena", 0}, {"Ružena/Ružena", 1}}); + } +} + +TEST_F(AssetCatalogTest, load_single_file_into_tree) +{ + AssetCatalogService service(asset_library_root_); + service.load_from_disk(asset_library_root_ + "/" + "blender_assets.cats.txt"); + + /* Contains not only paths from the CDF but 
also the missing parents (implicitly defined
   * catalogs). */
  std::vector<CatalogPathInfo> expected_paths{
      {"character", 0},
      {"character/Ellie", 1},
      {"character/Ellie/poselib", 2},
      {"character/Ellie/poselib/tailslash", 3},
      {"character/Ellie/poselib/white space", 3},
      {"character/Ružena", 1},
      {"character/Ružena/poselib", 2},
      {"character/Ružena/poselib/face", 3},
      {"character/Ružena/poselib/hand", 3},
      {"path", 0},                    /* Implicit. */
      {"path/without", 1},            /* Implicit. */
      {"path/without/simplename", 2}, /* From CDF. */
  };

  AssetCatalogTree *tree = service.get_catalog_tree();
  assert_expected_tree_items(tree, expected_paths);
}

TEST_F(AssetCatalogTest, foreach_in_tree)
{
  {
    /* An empty tree should not invoke any of the foreach callbacks. */
    AssetCatalogTree tree{};
    const std::vector<CatalogPathInfo> no_catalogs{};

    assert_expected_tree_items(&tree, no_catalogs);
    assert_expected_tree_root_items(&tree, no_catalogs);
    /* Need a root item to check child items. */
    std::unique_ptr<AssetCatalog> catalog = AssetCatalog::from_path("something");
    tree.insert_item(*catalog);
    tree.foreach_root_item([&no_catalogs, this](AssetCatalogTreeItem &item) {
      assert_expected_tree_item_child_items(&item, no_catalogs);
    });
  }

  AssetCatalogService service(asset_library_root_);
  service.load_from_disk(asset_library_root_ + "/" + "blender_assets.cats.txt");

  std::vector<CatalogPathInfo> expected_root_items{{"character", 0}, {"path", 0}};
  AssetCatalogTree *tree = service.get_catalog_tree();
  assert_expected_tree_root_items(tree, expected_root_items);

  /* Test if the direct children of the root item are what's expected. */
  std::vector<std::vector<CatalogPathInfo>> expected_root_child_items = {
      /* Children of the "character" root item. */
      {{"character/Ellie", 1}, {"character/Ružena", 1}},
      /* Children of the "path" root item. */
      {{"path/without", 1}},
  };
  int i = 0;
  tree->foreach_root_item([&expected_root_child_items, &i, this](AssetCatalogTreeItem &item) {
    assert_expected_tree_item_child_items(&item, expected_root_child_items[i]);
    i++;
  });
}

TEST_F(AssetCatalogTest, find_catalog_by_path)
{
  TestableAssetCatalogService service(asset_library_root_);
  service.load_from_disk(asset_library_root_ + "/" +
                         AssetCatalogService::DEFAULT_CATALOG_FILENAME);

  AssetCatalog *catalog;

  EXPECT_EQ(nullptr, service.find_catalog_by_path(""));
  catalog = service.find_catalog_by_path("character/Ellie/poselib/white space");
  EXPECT_NE(nullptr, catalog);
  EXPECT_EQ(UUID_POSES_ELLIE_WHITESPACE, catalog->catalog_id);
  catalog = service.find_catalog_by_path("character/Ružena/poselib");
  EXPECT_NE(nullptr, catalog);
  EXPECT_EQ(UUID_POSES_RUZENA, catalog->catalog_id);

  /* "character/Ellie/poselib" is used by two catalogs. Check if it's using the first one. */
  catalog = service.find_catalog_by_path("character/Ellie/poselib");
  EXPECT_NE(nullptr, catalog);
  EXPECT_EQ(UUID_POSES_ELLIE, catalog->catalog_id);
  EXPECT_NE(UUID_POSES_ELLIE_TRAILING_SLASH, catalog->catalog_id);
}

TEST_F(AssetCatalogTest, write_single_file)
{
  TestableAssetCatalogService service(asset_library_root_);
  service.load_from_disk(asset_library_root_ + "/" +
                         AssetCatalogService::DEFAULT_CATALOG_FILENAME);

  const CatalogFilePath save_to_path = use_temp_path() +
                                       AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  AssetCatalogDefinitionFile *cdf = service.get_catalog_definition_file();
  cdf->write_to_disk(save_to_path);

  AssetCatalogService loaded_service(save_to_path);
  loaded_service.load_from_disk();

  /* Test that the expected catalogs are there. */
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE));
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_WHITESPACE));
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_TRAILING_SLASH));
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA));
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_HAND));
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_FACE));

  /* Test that the invalid catalog definition wasn't copied. */
  EXPECT_EQ(nullptr, loaded_service.find_catalog(UUID_ID_WITHOUT_PATH));

  /* TODO(@sybren): test ordering of catalogs in the file. */
}

/* Saving a blend file while there are no catalogs should not create an (empty) CDF. */
TEST_F(AssetCatalogTest, no_writing_empty_files)
{
  const CatalogFilePath temp_lib_root = create_temp_path();
  AssetCatalogService service(temp_lib_root);
  service.write_to_disk_on_blendfile_save(temp_lib_root + "phony.blend");

  const CatalogFilePath default_cdf_path = temp_lib_root +
                                           AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  EXPECT_FALSE(BLI_exists(default_cdf_path.c_str()));
}

/* Already loaded a CDF, saving to some unrelated directory. */
TEST_F(AssetCatalogTest, on_blendfile_save__with_existing_cdf)
{
  const CatalogFilePath top_level_dir = create_temp_path(); /* Has trailing slash. */

  /* Create a copy of the CDF in SVN, so we can safely write to it. */
  const CatalogFilePath original_cdf_file = asset_library_root_ + "/blender_assets.cats.txt";
  const CatalogFilePath cdf_dirname = top_level_dir + "other_dir/";
  const CatalogFilePath cdf_filename = cdf_dirname + AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  ASSERT_TRUE(BLI_dir_create_recursive(cdf_dirname.c_str()));
  ASSERT_EQ(0, BLI_copy(original_cdf_file.c_str(), cdf_filename.c_str()))
      << "Unable to copy " << original_cdf_file << " to " << cdf_filename;

  /* Load the CDF, add a catalog, and trigger a write. This should write to the loaded CDF. */
  TestableAssetCatalogService service(cdf_filename);
  service.load_from_disk();
  const AssetCatalog *cat = service.create_catalog("some/catalog/path");

  const CatalogFilePath blendfilename = top_level_dir + "subdir/some_file.blend";
  ASSERT_TRUE(service.write_to_disk_on_blendfile_save(blendfilename));
  EXPECT_EQ(cdf_filename, service.get_catalog_definition_file()->file_path);

  /* Test that the CDF was created in the expected location. */
  const CatalogFilePath backup_filename = cdf_filename + "~";
  EXPECT_TRUE(BLI_exists(cdf_filename.c_str()));
  EXPECT_TRUE(BLI_exists(backup_filename.c_str()))
      << "Overwritten CDF should have been backed up.";

  /* Test that the on-disk CDF contains the expected catalogs. */
  AssetCatalogService loaded_service(cdf_filename);
  loaded_service.load_from_disk();
  EXPECT_NE(nullptr, loaded_service.find_catalog(cat->catalog_id))
      << "Expected to see the newly-created catalog.";
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE))
      << "Expected to see the already-existing catalog.";
}

/* Create some catalogs in memory, save to directory that doesn't contain anything else. */
TEST_F(AssetCatalogTest, on_blendfile_save__from_memory_into_empty_directory)
{
  const CatalogFilePath target_dir = create_temp_path(); /* Has trailing slash. */

  TestableAssetCatalogService service;
  const AssetCatalog *cat = service.create_catalog("some/catalog/path");

  const CatalogFilePath blendfilename = target_dir + "some_file.blend";
  ASSERT_TRUE(service.write_to_disk_on_blendfile_save(blendfilename));

  /* Test that the CDF was created in the expected location. */
  const CatalogFilePath expected_cdf_path = target_dir +
                                            AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  EXPECT_TRUE(BLI_exists(expected_cdf_path.c_str()));

  /* Test that the in-memory CDF has been created, and contains the expected catalog.
 */
  AssetCatalogDefinitionFile *cdf = service.get_catalog_definition_file();
  ASSERT_NE(nullptr, cdf);
  EXPECT_TRUE(cdf->contains(cat->catalog_id));

  /* Test that the on-disk CDF contains the expected catalog. */
  AssetCatalogService loaded_service(expected_cdf_path);
  loaded_service.load_from_disk();
  EXPECT_NE(nullptr, loaded_service.find_catalog(cat->catalog_id));
}

/* Create some catalogs in memory, save to directory that contains a default CDF. */
TEST_F(AssetCatalogTest, on_blendfile_save__from_memory_into_existing_cdf_and_merge)
{
  const CatalogFilePath target_dir = create_temp_path(); /* Has trailing slash. */
  const CatalogFilePath original_cdf_file = asset_library_root_ + "/blender_assets.cats.txt";
  const CatalogFilePath writable_cdf_file = target_dir +
                                            AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  ASSERT_EQ(0, BLI_copy(original_cdf_file.c_str(), writable_cdf_file.c_str()));

  /* Create the catalog service without loading the already-existing CDF. */
  TestableAssetCatalogService service;
  const AssetCatalog *cat = service.create_catalog("some/catalog/path");

  /* Mock that the blend file is written to a subdirectory of the asset library. */
  const CatalogFilePath blendfilename = target_dir + "some_file.blend";
  ASSERT_TRUE(service.write_to_disk_on_blendfile_save(blendfilename));

  /* Test that the CDF still exists in the expected location. */
  const CatalogFilePath backup_filename = writable_cdf_file + "~";
  EXPECT_TRUE(BLI_exists(writable_cdf_file.c_str()));
  EXPECT_TRUE(BLI_exists(backup_filename.c_str()))
      << "Overwritten CDF should have been backed up.";

  /* Test that the in-memory CDF has the expected file path. */
  AssetCatalogDefinitionFile *cdf = service.get_catalog_definition_file();
  ASSERT_NE(nullptr, cdf);
  EXPECT_EQ(writable_cdf_file, cdf->file_path);

  /* Test that the in-memory catalogs have been merged with the on-disk one. */
  AssetCatalogService loaded_service(writable_cdf_file);
  loaded_service.load_from_disk();
  EXPECT_NE(nullptr, loaded_service.find_catalog(cat->catalog_id));
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE));
}

/* Create some catalogs in memory, save to subdirectory of a registered asset library. */
TEST_F(AssetCatalogTest, on_blendfile_save__from_memory_into_existing_asset_lib)
{
  const CatalogFilePath target_dir = create_temp_path(); /* Has trailing slash. */
  const CatalogFilePath original_cdf_file = asset_library_root_ + "/blender_assets.cats.txt";
  const CatalogFilePath registered_asset_lib = target_dir + "my_asset_library/";
  CatalogFilePath writable_cdf_file = registered_asset_lib +
                                      AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  /* Normalized to native separators so the later EXPECT_EQ on file_path compares like-for-like. */
  BLI_path_slash_native(writable_cdf_file.data());

  /* Set up a temporary asset library for testing. */
  bUserAssetLibrary *asset_lib_pref = BKE_preferences_asset_library_add(
      &U, "Test", registered_asset_lib.c_str());
  ASSERT_NE(nullptr, asset_lib_pref);
  ASSERT_TRUE(BLI_dir_create_recursive(registered_asset_lib.c_str()));
  ASSERT_EQ(0, BLI_copy(original_cdf_file.c_str(), writable_cdf_file.c_str()));

  /* Create the catalog service without loading the already-existing CDF. */
  TestableAssetCatalogService service;
  const CatalogFilePath blenddirname = registered_asset_lib + "subdirectory/";
  const CatalogFilePath blendfilename = blenddirname + "some_file.blend";
  ASSERT_TRUE(BLI_dir_create_recursive(blenddirname.c_str()));
  const AssetCatalog *cat = service.create_catalog("some/catalog/path");

  /* Mock that the blend file is written to the directory already containing a CDF. */
  ASSERT_TRUE(service.write_to_disk_on_blendfile_save(blendfilename));

  /* Test that the CDF still exists in the expected location. */
  EXPECT_TRUE(BLI_exists(writable_cdf_file.c_str()));
  const CatalogFilePath backup_filename = writable_cdf_file + "~";
  EXPECT_TRUE(BLI_exists(backup_filename.c_str()))
      << "Overwritten CDF should have been backed up.";

  /* Test that the in-memory CDF has the expected file path. */
  AssetCatalogDefinitionFile *cdf = service.get_catalog_definition_file();
  BLI_path_slash_native(cdf->file_path.data());
  EXPECT_EQ(writable_cdf_file, cdf->file_path);

  /* Test that the in-memory catalogs have been merged with the on-disk one. */
  AssetCatalogService loaded_service(writable_cdf_file);
  loaded_service.load_from_disk();
  EXPECT_NE(nullptr, loaded_service.find_catalog(cat->catalog_id));
  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE));

  BKE_preferences_asset_library_remove(&U, asset_lib_pref);
}

TEST_F(AssetCatalogTest, create_first_catalog_from_scratch)
{
  /* Even from scratch a root directory should be known. */
  const CatalogFilePath temp_lib_root = use_temp_path();
  AssetCatalogService service;

  /* Just creating the service should NOT create the path. */
  EXPECT_FALSE(BLI_exists(temp_lib_root.c_str()));

  AssetCatalog *cat = service.create_catalog("some/catalog/path");
  ASSERT_NE(nullptr, cat);
  EXPECT_EQ(cat->path, "some/catalog/path");
  EXPECT_EQ(cat->simple_name, "some-catalog-path");

  /* Creating a new catalog should not save anything to disk yet. */
  EXPECT_FALSE(BLI_exists(temp_lib_root.c_str()));

  /* Writing to disk should create the directory + the default file. */
  service.write_to_disk_on_blendfile_save(temp_lib_root + "phony.blend");
  EXPECT_TRUE(BLI_is_dir(temp_lib_root.c_str()));

  const CatalogFilePath definition_file_path = temp_lib_root + "/" +
                                               AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  EXPECT_TRUE(BLI_is_file(definition_file_path.c_str()));

  AssetCatalogService loaded_service(temp_lib_root);
  loaded_service.load_from_disk();

  /* Test that the expected catalog is there. */
  AssetCatalog *written_cat = loaded_service.find_catalog(cat->catalog_id);
  ASSERT_NE(nullptr, written_cat);
  EXPECT_EQ(written_cat->catalog_id, cat->catalog_id);
  EXPECT_EQ(written_cat->path, cat->path.str());
}

TEST_F(AssetCatalogTest, create_catalog_after_loading_file)
{
  const CatalogFilePath temp_lib_root = create_temp_path();

  /* Copy the asset catalog definition files to a separate location, so that we can test without
   * overwriting the test file in SVN. */
  const CatalogFilePath default_catalog_path = asset_library_root_ + "/" +
                                               AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  const CatalogFilePath writable_catalog_path = temp_lib_root +
                                                AssetCatalogService::DEFAULT_CATALOG_FILENAME;
  ASSERT_EQ(0, BLI_copy(default_catalog_path.c_str(), writable_catalog_path.c_str()));
  EXPECT_TRUE(BLI_is_dir(temp_lib_root.c_str()));
  EXPECT_TRUE(BLI_is_file(writable_catalog_path.c_str()));

  TestableAssetCatalogService service(temp_lib_root);
  service.load_from_disk();
  EXPECT_EQ(writable_catalog_path, service.get_catalog_definition_file()->file_path);
  EXPECT_NE(nullptr, service.find_catalog(UUID_POSES_ELLIE)) << "expected catalogs to be loaded";

  /* This should create a new catalog but not write to disk. */
  const AssetCatalog *new_catalog = service.create_catalog("new/catalog");
  const bUUID new_catalog_id = new_catalog->catalog_id;

  /* Reload the on-disk catalog file. */
  TestableAssetCatalogService loaded_service(temp_lib_root);
  loaded_service.load_from_disk();
  EXPECT_EQ(writable_catalog_path, loaded_service.get_catalog_definition_file()->file_path);

  EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE))
      << "expected pre-existing catalogs to be kept in the file";
  EXPECT_EQ(nullptr, loaded_service.find_catalog(new_catalog_id))
      << "expecting newly added catalog to not yet be saved to " << temp_lib_root;

  /* Write and reload the catalog file.
*/ + service.write_to_disk_on_blendfile_save(temp_lib_root + "phony.blend"); + AssetCatalogService reloaded_service(temp_lib_root); + reloaded_service.load_from_disk(); + EXPECT_NE(nullptr, reloaded_service.find_catalog(UUID_POSES_ELLIE)) + << "expected pre-existing catalogs to be kept in the file"; + EXPECT_NE(nullptr, reloaded_service.find_catalog(new_catalog_id)) + << "expecting newly added catalog to exist in the file"; +} + +TEST_F(AssetCatalogTest, create_catalog_path_cleanup) +{ + AssetCatalogService service; + AssetCatalog *cat = service.create_catalog(" /some/path / "); + + EXPECT_FALSE(BLI_uuid_is_nil(cat->catalog_id)); + EXPECT_EQ("some/path", cat->path.str()); + EXPECT_EQ("some-path", cat->simple_name); +} + +TEST_F(AssetCatalogTest, create_catalog_simple_name) +{ + AssetCatalogService service; + AssetCatalog *cat = service.create_catalog( + "production/Spite Fright/Characters/Victora/Pose Library/Approved/Body Parts/Hands"); + + EXPECT_FALSE(BLI_uuid_is_nil(cat->catalog_id)); + EXPECT_EQ("production/Spite Fright/Characters/Victora/Pose Library/Approved/Body Parts/Hands", + cat->path.str()); + EXPECT_EQ("...ht-Characters-Victora-Pose Library-Approved-Body Parts-Hands", cat->simple_name); +} + +TEST_F(AssetCatalogTest, delete_catalog_leaf) +{ + AssetCatalogService service(asset_library_root_); + service.load_from_disk(asset_library_root_ + "/" + "blender_assets.cats.txt"); + + /* Delete a leaf catalog, i.e. one that is not a parent of another catalog. + * This keeps this particular test easy. */ + service.delete_catalog(UUID_POSES_RUZENA_HAND); + EXPECT_EQ(nullptr, service.find_catalog(UUID_POSES_RUZENA_HAND)); + + /* Contains not only paths from the CDF but also the missing parents (implicitly defined + * catalogs). This is why a leaf catalog was deleted. 
*/ + std::vector<CatalogPathInfo> expected_paths{ + {"character", 0}, + {"character/Ellie", 1}, + {"character/Ellie/poselib", 2}, + {"character/Ellie/poselib/tailslash", 3}, + {"character/Ellie/poselib/white space", 3}, + {"character/Ružena", 1}, + {"character/Ružena/poselib", 2}, + {"character/Ružena/poselib/face", 3}, + // {"character/Ružena/poselib/hand", 3}, /* This is the deleted one. */ + {"path", 0}, + {"path/without", 1}, + {"path/without/simplename", 2}, + }; + + AssetCatalogTree *tree = service.get_catalog_tree(); + assert_expected_tree_items(tree, expected_paths); +} + +TEST_F(AssetCatalogTest, delete_catalog_write_to_disk) +{ + TestableAssetCatalogService service(asset_library_root_); + service.load_from_disk(asset_library_root_ + "/" + + AssetCatalogService::DEFAULT_CATALOG_FILENAME); + + service.delete_catalog(UUID_POSES_ELLIE); + + const CatalogFilePath save_to_path = use_temp_path(); + AssetCatalogDefinitionFile *cdf = service.get_catalog_definition_file(); + cdf->write_to_disk(save_to_path + "/" + AssetCatalogService::DEFAULT_CATALOG_FILENAME); + + AssetCatalogService loaded_service(save_to_path); + loaded_service.load_from_disk(); + + /* Test that the expected catalogs are there, except the deleted one. 
*/ + EXPECT_EQ(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_WHITESPACE)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_TRAILING_SLASH)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_HAND)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_FACE)); +} + +TEST_F(AssetCatalogTest, update_catalog_path) +{ + AssetCatalogService service(asset_library_root_); + service.load_from_disk(asset_library_root_ + "/" + + AssetCatalogService::DEFAULT_CATALOG_FILENAME); + + const AssetCatalog *orig_cat = service.find_catalog(UUID_POSES_RUZENA); + const AssetCatalogPath orig_path = orig_cat->path; + + service.update_catalog_path(UUID_POSES_RUZENA, "charlib/Ružena"); + + EXPECT_EQ(nullptr, service.find_catalog_by_path(orig_path)) + << "The original (pre-rename) path should not be associated with a catalog any more."; + + const AssetCatalog *renamed_cat = service.find_catalog(UUID_POSES_RUZENA); + ASSERT_NE(nullptr, renamed_cat); + ASSERT_EQ(orig_cat, renamed_cat) << "Changing the path should not reallocate the catalog."; + EXPECT_EQ(orig_cat->simple_name, renamed_cat->simple_name) + << "Changing the path should not change the simple name."; + EXPECT_EQ(orig_cat->catalog_id, renamed_cat->catalog_id) + << "Changing the path should not change the catalog ID."; + + EXPECT_EQ("charlib/Ružena", renamed_cat->path.str()) + << "Changing the path should change the path. 
Surprise."; + + EXPECT_EQ("charlib/Ružena/hand", service.find_catalog(UUID_POSES_RUZENA_HAND)->path.str()) + << "Changing the path should update children."; + EXPECT_EQ("charlib/Ružena/face", service.find_catalog(UUID_POSES_RUZENA_FACE)->path.str()) + << "Changing the path should update children."; +} + +TEST_F(AssetCatalogTest, merge_catalog_files) +{ + const CatalogFilePath cdf_dir = create_temp_path(); + const CatalogFilePath original_cdf_file = asset_library_root_ + "/blender_assets.cats.txt"; + const CatalogFilePath modified_cdf_file = asset_library_root_ + "/modified_assets.cats.txt"; + const CatalogFilePath temp_cdf_file = cdf_dir + "blender_assets.cats.txt"; + ASSERT_EQ(0, BLI_copy(original_cdf_file.c_str(), temp_cdf_file.c_str())); + + /* Load the unmodified, original CDF. */ + TestableAssetCatalogService service(asset_library_root_); + service.load_from_disk(cdf_dir); + + /* Copy a modified file, to mimic a situation where someone changed the + * CDF after we loaded it. */ + ASSERT_EQ(0, BLI_copy(modified_cdf_file.c_str(), temp_cdf_file.c_str())); + + /* Overwrite the modified file. This should merge the on-disk file with our catalogs. */ + service.write_to_disk_on_blendfile_save(cdf_dir + "phony.blend"); + + AssetCatalogService loaded_service(cdf_dir); + loaded_service.load_from_disk(); + + /* Test that the expected catalogs are there. */ + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_WHITESPACE)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_TRAILING_SLASH)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_HAND)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_FACE)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_AGENT_47)); /* New in the modified file. */ + + /* When there are overlaps, the in-memory (i.e. 
last-saved) paths should win. */ + const AssetCatalog *ruzena_face = loaded_service.find_catalog(UUID_POSES_RUZENA_FACE); + EXPECT_EQ("character/Ružena/poselib/face", ruzena_face->path.str()); +} + +TEST_F(AssetCatalogTest, backups) +{ + const CatalogFilePath cdf_dir = create_temp_path(); + const CatalogFilePath original_cdf_file = asset_library_root_ + "/blender_assets.cats.txt"; + const CatalogFilePath writable_cdf_file = cdf_dir + "/blender_assets.cats.txt"; + ASSERT_EQ(0, BLI_copy(original_cdf_file.c_str(), writable_cdf_file.c_str())); + + /* Read a CDF, modify, and write it. */ + AssetCatalogService service(cdf_dir); + service.load_from_disk(); + service.delete_catalog(UUID_POSES_ELLIE); + service.write_to_disk_on_blendfile_save(cdf_dir + "phony.blend"); + + const CatalogFilePath backup_path = writable_cdf_file + "~"; + ASSERT_TRUE(BLI_is_file(backup_path.c_str())); + + AssetCatalogService loaded_service; + loaded_service.load_from_disk(backup_path); + + /* Test that the expected catalogs are there, including the deleted one. + * This is the backup, after all. */ + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_WHITESPACE)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_ELLIE_TRAILING_SLASH)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_HAND)); + EXPECT_NE(nullptr, loaded_service.find_catalog(UUID_POSES_RUZENA_FACE)); +} + +TEST_F(AssetCatalogTest, order_by_path) +{ + const bUUID cat2_uuid("22222222-b847-44d9-bdca-ff04db1c24f5"); + const bUUID cat4_uuid("11111111-b847-44d9-bdca-ff04db1c24f5"); /* Sorts earlier than above. 
*/ + const AssetCatalog cat1(BLI_uuid_generate_random(), "simple/path/child", ""); + const AssetCatalog cat2(cat2_uuid, "simple/path", ""); + const AssetCatalog cat3(BLI_uuid_generate_random(), "complex/path/...or/is/it?", ""); + const AssetCatalog cat4( + cat4_uuid, "simple/path", "different ID, same path"); /* should be kept */ + const AssetCatalog cat5(cat4_uuid, "simple/path", "same ID, same path"); /* disappears */ + + AssetCatalogOrderedSet by_path; + by_path.insert(&cat1); + by_path.insert(&cat2); + by_path.insert(&cat3); + by_path.insert(&cat4); + by_path.insert(&cat5); + + AssetCatalogOrderedSet::const_iterator set_iter = by_path.begin(); + + EXPECT_EQ(1, by_path.count(&cat1)); + EXPECT_EQ(1, by_path.count(&cat2)); + EXPECT_EQ(1, by_path.count(&cat3)); + EXPECT_EQ(1, by_path.count(&cat4)); + ASSERT_EQ(4, by_path.size()) << "Expecting cat5 to not be stored in the set, as it duplicates " + "an already-existing path + UUID"; + + EXPECT_EQ(cat3.catalog_id, (*(set_iter++))->catalog_id); /* complex/path */ + EXPECT_EQ(cat4.catalog_id, (*(set_iter++))->catalog_id); /* simple/path with 111.. ID */ + EXPECT_EQ(cat2.catalog_id, (*(set_iter++))->catalog_id); /* simple/path with 222.. 
ID */ + EXPECT_EQ(cat1.catalog_id, (*(set_iter++))->catalog_id); /* simple/path/child */ + + if (set_iter != by_path.end()) { + const AssetCatalog *next_cat = *set_iter; + FAIL() << "Did not expect more items in the set, had at least " << next_cat->catalog_id << ":" + << next_cat->path; + } +} + +TEST_F(AssetCatalogTest, create_missing_catalogs) +{ + TestableAssetCatalogService new_service; + new_service.create_catalog("path/with/missing/parents"); + + EXPECT_EQ(nullptr, new_service.find_catalog_by_path("path/with/missing")) + << "Missing parents should not be immediately created."; + EXPECT_EQ(nullptr, new_service.find_catalog_by_path("")) << "Empty path should never be valid"; + + new_service.create_missing_catalogs(); + + EXPECT_NE(nullptr, new_service.find_catalog_by_path("path/with/missing")); + EXPECT_NE(nullptr, new_service.find_catalog_by_path("path/with")); + EXPECT_NE(nullptr, new_service.find_catalog_by_path("path")); + EXPECT_EQ(nullptr, new_service.find_catalog_by_path("")) + << "Empty path should never be valid, even when after missing catalogs"; +} + +TEST_F(AssetCatalogTest, create_missing_catalogs_after_loading) +{ + TestableAssetCatalogService loaded_service(asset_library_root_); + loaded_service.load_from_disk(); + + const AssetCatalog *cat_char = loaded_service.find_catalog_by_path("character"); + const AssetCatalog *cat_ellie = loaded_service.find_catalog_by_path("character/Ellie"); + const AssetCatalog *cat_ruzena = loaded_service.find_catalog_by_path("character/Ružena"); + ASSERT_NE(nullptr, cat_char) << "Missing parents should be created immediately after loading."; + ASSERT_NE(nullptr, cat_ellie) << "Missing parents should be created immediately after loading."; + ASSERT_NE(nullptr, cat_ruzena) << "Missing parents should be created immediately after loading."; + + AssetCatalogDefinitionFile *cdf = loaded_service.get_catalog_definition_file(); + ASSERT_NE(nullptr, cdf); + EXPECT_TRUE(cdf->contains(cat_char->catalog_id)) << "Missing parents 
should be saved to a CDF."; + EXPECT_TRUE(cdf->contains(cat_ellie->catalog_id)) << "Missing parents should be saved to a CDF."; + EXPECT_TRUE(cdf->contains(cat_ruzena->catalog_id)) + << "Missing parents should be saved to a CDF."; + + /* Check that each missing parent is only created once. The CDF contains multiple paths that + * could trigger the creation of missing parents, so this test makes sense. */ + EXPECT_EQ(1, loaded_service.count_catalogs_with_path("character")); + EXPECT_EQ(1, loaded_service.count_catalogs_with_path("character/Ellie")); + EXPECT_EQ(1, loaded_service.count_catalogs_with_path("character/Ružena")); +} + +TEST_F(AssetCatalogTest, create_catalog_filter) +{ + AssetCatalogService service(asset_library_root_); + service.load_from_disk(); + + /* Alias for the same catalog as the main one. */ + AssetCatalog *alias_ruzena = service.create_catalog("character/Ružena/poselib"); + /* Alias for a sub-catalog. */ + AssetCatalog *alias_ruzena_hand = service.create_catalog("character/Ružena/poselib/hand"); + + AssetCatalogFilter filter = service.create_catalog_filter(UUID_POSES_RUZENA); + + /* Positive test for loaded-from-disk catalogs. */ + EXPECT_TRUE(filter.contains(UUID_POSES_RUZENA)) + << "Main catalog should be included in the filter."; + EXPECT_TRUE(filter.contains(UUID_POSES_RUZENA_HAND)) + << "Sub-catalog should be included in the filter."; + EXPECT_TRUE(filter.contains(UUID_POSES_RUZENA_FACE)) + << "Sub-catalog should be included in the filter."; + + /* Positive test for newly-created catalogs. */ + EXPECT_TRUE(filter.contains(alias_ruzena->catalog_id)) + << "Alias of main catalog should be included in the filter."; + EXPECT_TRUE(filter.contains(alias_ruzena_hand->catalog_id)) + << "Alias of sub-catalog should be included in the filter."; + + /* Negative test for unrelated catalogs. 
*/ + EXPECT_FALSE(filter.contains(BLI_uuid_nil())) << "Nil catalog should not be included."; + EXPECT_FALSE(filter.contains(UUID_ID_WITHOUT_PATH)); + EXPECT_FALSE(filter.contains(UUID_POSES_ELLIE)); + EXPECT_FALSE(filter.contains(UUID_POSES_ELLIE_WHITESPACE)); + EXPECT_FALSE(filter.contains(UUID_POSES_ELLIE_TRAILING_SLASH)); + EXPECT_FALSE(filter.contains(UUID_WITHOUT_SIMPLENAME)); +} + +TEST_F(AssetCatalogTest, create_catalog_filter_for_unknown_uuid) +{ + AssetCatalogService service; + const bUUID unknown_uuid = BLI_uuid_generate_random(); + + AssetCatalogFilter filter = service.create_catalog_filter(unknown_uuid); + EXPECT_TRUE(filter.contains(unknown_uuid)); + + EXPECT_FALSE(filter.contains(BLI_uuid_nil())) << "Nil catalog should not be included."; + EXPECT_FALSE(filter.contains(UUID_POSES_ELLIE)); +} + +TEST_F(AssetCatalogTest, create_catalog_filter_for_unassigned_assets) +{ + AssetCatalogService service; + + AssetCatalogFilter filter = service.create_catalog_filter(BLI_uuid_nil()); + EXPECT_TRUE(filter.contains(BLI_uuid_nil())); + EXPECT_FALSE(filter.contains(UUID_POSES_ELLIE)); +} + +} // namespace blender::bke::tests diff --git a/source/blender/blenkernel/intern/asset_library.cc b/source/blender/blenkernel/intern/asset_library.cc new file mode 100644 index 00000000000..27e66ee5725 --- /dev/null +++ b/source/blender/blenkernel/intern/asset_library.cc @@ -0,0 +1,141 @@ +/* + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ + +/** \file + * \ingroup bke + */ + +#include "BKE_asset_library.hh" +#include "BKE_callbacks.h" +#include "BKE_main.h" +#include "BKE_preferences.h" + +#include "BLI_path_util.h" + +#include "DNA_userdef_types.h" + +#include "MEM_guardedalloc.h" + +#include <memory> + +/** + * Loading an asset library at this point only means loading the catalogs. Later on this should + * invoke reading of asset representations too. + */ +struct AssetLibrary *BKE_asset_library_load(const char *library_path) +{ + blender::bke::AssetLibrary *lib = new blender::bke::AssetLibrary(); + lib->on_save_handler_register(); + lib->load(library_path); + return reinterpret_cast<struct AssetLibrary *>(lib); +} + +void BKE_asset_library_free(struct AssetLibrary *asset_library) +{ + blender::bke::AssetLibrary *lib = reinterpret_cast<blender::bke::AssetLibrary *>(asset_library); + lib->on_save_handler_unregister(); + delete lib; +} + +bool BKE_asset_library_find_suitable_root_path_from_path(const char *input_path, + char *r_library_path) +{ + if (bUserAssetLibrary *preferences_lib = BKE_preferences_asset_library_containing_path( + &U, input_path)) { + BLI_strncpy(r_library_path, preferences_lib->path, FILE_MAXDIR); + return true; + } + + BLI_split_dir_part(input_path, r_library_path, FILE_MAXDIR); + return r_library_path[0] != '\0'; +} + +bool BKE_asset_library_find_suitable_root_path_from_main(const Main *bmain, char *r_library_path) +{ + return BKE_asset_library_find_suitable_root_path_from_path(bmain->name, r_library_path); +} + +blender::bke::AssetCatalogService *BKE_asset_library_get_catalog_service( + const ::AssetLibrary *library_c) +{ + if (library_c == nullptr) { + return nullptr; + } + + const blender::bke::AssetLibrary &library = reinterpret_cast<const 
blender::bke::AssetLibrary &>( + *library_c); + return library.catalog_service.get(); +} + +blender::bke::AssetCatalogTree *BKE_asset_library_get_catalog_tree(const ::AssetLibrary *library) +{ + blender::bke::AssetCatalogService *catalog_service = BKE_asset_library_get_catalog_service( + library); + if (catalog_service == nullptr) { + return nullptr; + } + + return catalog_service->get_catalog_tree(); +} + +namespace blender::bke { + +void AssetLibrary::load(StringRefNull library_root_directory) +{ + auto catalog_service = std::make_unique<AssetCatalogService>(library_root_directory); + catalog_service->load_from_disk(); + this->catalog_service = std::move(catalog_service); +} + +namespace { +void asset_library_on_save_post(struct Main *main, + struct PointerRNA **pointers, + const int num_pointers, + void *arg) +{ + AssetLibrary *asset_lib = static_cast<AssetLibrary *>(arg); + asset_lib->on_save_post(main, pointers, num_pointers); +} +} // namespace + +void AssetLibrary::on_save_handler_register() +{ + /* The callback system doesn't own `on_save_callback_store_`. 
*/ + on_save_callback_store_.alloc = false; + + on_save_callback_store_.func = asset_library_on_save_post; + on_save_callback_store_.arg = this; + + BKE_callback_add(&on_save_callback_store_, BKE_CB_EVT_SAVE_POST); +} + +void AssetLibrary::on_save_handler_unregister() +{ + BKE_callback_remove(&on_save_callback_store_, BKE_CB_EVT_SAVE_POST); +} + +void AssetLibrary::on_save_post(struct Main *main, + struct PointerRNA ** /*pointers*/, + const int /*num_pointers*/) +{ + if (this->catalog_service == nullptr) { + return; + } + + this->catalog_service->write_to_disk_on_blendfile_save(main->name); +} + +} // namespace blender::bke diff --git a/source/blender/blenkernel/intern/asset_library_test.cc b/source/blender/blenkernel/intern/asset_library_test.cc new file mode 100644 index 00000000000..30ac4dc6ad8 --- /dev/null +++ b/source/blender/blenkernel/intern/asset_library_test.cc @@ -0,0 +1,82 @@ +/* + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + * + * The Original Code is Copyright (C) 2020 Blender Foundation + * All rights reserved. 
+ */ + +#include "BKE_appdir.h" +#include "BKE_asset_catalog.hh" +#include "BKE_asset_library.hh" + +#include "testing/testing.h" + +namespace blender::bke::tests { + +TEST(AssetLibraryTest, load_and_free_c_functions) +{ + const std::string test_files_dir = blender::tests::flags_test_asset_dir(); + if (test_files_dir.empty()) { + FAIL(); + } + + /* Load the asset library. */ + const std::string library_path = test_files_dir + "/" + "asset_library"; + ::AssetLibrary *library_c_ptr = BKE_asset_library_load(library_path.data()); + ASSERT_NE(nullptr, library_c_ptr); + + /* Check that it can be cast to the C++ type and has a Catalog Service. */ + blender::bke::AssetLibrary *library_cpp_ptr = reinterpret_cast<blender::bke::AssetLibrary *>( + library_c_ptr); + AssetCatalogService *service = library_cpp_ptr->catalog_service.get(); + ASSERT_NE(nullptr, service); + + /* Check that the catalogs defined in the library are actually loaded. This just tests one single + * catalog, as that indicates the file has been loaded. Testing that that loading went OK is for + * the asset catalog service tests. */ + const bUUID uuid_poses_ellie("df60e1f6-2259-475b-93d9-69a1b4a8db78"); + AssetCatalog *poses_ellie = service->find_catalog(uuid_poses_ellie); + ASSERT_NE(nullptr, poses_ellie) << "unable to find POSES_ELLIE catalog"; + EXPECT_EQ("character/Ellie/poselib", poses_ellie->path.str()); + + BKE_asset_library_free(library_c_ptr); +} + +TEST(AssetLibraryTest, load_nonexistent_directory) +{ + const std::string test_files_dir = blender::tests::flags_test_asset_dir(); + if (test_files_dir.empty()) { + FAIL(); + } + + /* Load the asset library. */ + const std::string library_path = test_files_dir + "/" + + "asset_library/this/subdir/does/not/exist"; + ::AssetLibrary *library_c_ptr = BKE_asset_library_load(library_path.data()); + ASSERT_NE(nullptr, library_c_ptr); + + /* Check that it can be cast to the C++ type and has a Catalog Service. 
*/ + blender::bke::AssetLibrary *library_cpp_ptr = reinterpret_cast<blender::bke::AssetLibrary *>( + library_c_ptr); + AssetCatalogService *service = library_cpp_ptr->catalog_service.get(); + ASSERT_NE(nullptr, service); + + /* Check that the catalog service doesn't have any catalogs. */ + EXPECT_TRUE(service->is_empty()); + + BKE_asset_library_free(library_c_ptr); +} + +} // namespace blender::bke::tests diff --git a/source/blender/blenkernel/intern/asset_test.cc b/source/blender/blenkernel/intern/asset_test.cc new file mode 100644 index 00000000000..77b98a8ac0a --- /dev/null +++ b/source/blender/blenkernel/intern/asset_test.cc @@ -0,0 +1,70 @@ +/* + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software Foundation, + * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + * + * The Original Code is Copyright (C) 2020 Blender Foundation + * All rights reserved. + */ + +#include "BKE_asset.h" + +#include "BLI_uuid.h" + +#include "DNA_asset_types.h" + +#include "testing/testing.h" + +namespace blender::bke::tests { + +TEST(AssetMetadataTest, set_catalog_id) +{ + AssetMetaData meta; + const bUUID uuid = BLI_uuid_generate_random(); + + /* Test trivial values. */ + BKE_asset_metadata_catalog_id_clear(&meta); + EXPECT_TRUE(BLI_uuid_is_nil(meta.catalog_id)); + EXPECT_STREQ("", meta.catalog_simple_name); + + /* Test simple situation where the given short name is used as-is. 
*/ + BKE_asset_metadata_catalog_id_set(&meta, uuid, "simple"); + EXPECT_TRUE(BLI_uuid_equal(uuid, meta.catalog_id)); + EXPECT_STREQ("simple", meta.catalog_simple_name); + + /* Test white-space trimming. */ + BKE_asset_metadata_catalog_id_set(&meta, uuid, " Govoriš angleško? "); + EXPECT_STREQ("Govoriš angleško?", meta.catalog_simple_name); + + /* Test length trimming to 63 chars + terminating zero. */ + constexpr char len66[] = "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"; + constexpr char len63[] = "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1"; + BKE_asset_metadata_catalog_id_set(&meta, uuid, len66); + EXPECT_STREQ(len63, meta.catalog_simple_name); + + /* Test length trimming happens after white-space trimming. */ + constexpr char len68[] = + " \ + 000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20 "; + BKE_asset_metadata_catalog_id_set(&meta, uuid, len68); + EXPECT_STREQ(len63, meta.catalog_simple_name); + + /* Test length trimming to 63 bytes, and not 63 characters. ✓ in UTF-8 is three bytes long. 
*/ + constexpr char with_utf8[] = + "00010203040506✓0708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20"; + BKE_asset_metadata_catalog_id_set(&meta, uuid, with_utf8); + EXPECT_STREQ("00010203040506✓0708090a0b0c0d0e0f101112131415161718191a1b1c1d", + meta.catalog_simple_name); +} + +} // namespace blender::bke::tests diff --git a/source/blender/blenkernel/intern/attribute_access.cc b/source/blender/blenkernel/intern/attribute_access.cc index 8c4f87be91f..c2837b522c4 100644 --- a/source/blender/blenkernel/intern/attribute_access.cc +++ b/source/blender/blenkernel/intern/attribute_access.cc @@ -55,6 +55,21 @@ using blender::fn::GVArray_For_SingleValue; namespace blender::bke { +std::ostream &operator<<(std::ostream &stream, const AttributeIDRef &attribute_id) +{ + if (attribute_id.is_named()) { + stream << attribute_id.name(); + } + else if (attribute_id.is_anonymous()) { + const AnonymousAttributeID &anonymous_id = attribute_id.anonymous_id(); + stream << "<" << BKE_anonymous_attribute_id_debug_name(&anonymous_id) << ">"; + } + else { + stream << "<none>"; + } + return stream; +} + const blender::fn::CPPType *custom_data_type_to_cpp_type(const CustomDataType type) { switch (type) { diff --git a/source/blender/blenkernel/intern/blendfile.c b/source/blender/blenkernel/intern/blendfile.c index 1c5d8804280..6957f9b5a69 100644 --- a/source/blender/blenkernel/intern/blendfile.c +++ b/source/blender/blenkernel/intern/blendfile.c @@ -344,6 +344,13 @@ static void setup_app_data(bContext *C, do_versions_ipos_to_animato(bmain); } + /* FIXME: Same as above, readfile's `do_version` do not allow to create new IDs. */ + /* TODO: Once this is definitively validated for 3.0 and option to not do it is removed, add a + * version bump and check here. 
*/ + if (!USER_EXPERIMENTAL_TEST(&U, no_proxy_to_override_conversion)) { + BKE_lib_override_library_main_proxy_convert(bmain, reports); + } + bmain->recovered = 0; /* startup.blend or recovered startup */ diff --git a/source/blender/blenkernel/intern/callbacks.c b/source/blender/blenkernel/intern/callbacks.c index 11ee9492b44..87d5961b12e 100644 --- a/source/blender/blenkernel/intern/callbacks.c +++ b/source/blender/blenkernel/intern/callbacks.c @@ -80,6 +80,15 @@ void BKE_callback_add(bCallbackFuncStore *funcstore, eCbEvent evt) BLI_addtail(lb, funcstore); } +void BKE_callback_remove(bCallbackFuncStore *funcstore, eCbEvent evt) +{ + ListBase *lb = &callback_slots[evt]; + BLI_remlink(lb, funcstore); + if (funcstore->alloc) { + MEM_freeN(funcstore); + } +} + void BKE_callback_global_init(void) { /* do nothing */ @@ -95,10 +104,7 @@ void BKE_callback_global_finalize(void) bCallbackFuncStore *funcstore_next; for (funcstore = lb->first; funcstore; funcstore = funcstore_next) { funcstore_next = funcstore->next; - BLI_remlink(lb, funcstore); - if (funcstore->alloc) { - MEM_freeN(funcstore); - } + BKE_callback_remove(funcstore, evt); } } } diff --git a/source/blender/blenkernel/intern/collection.c b/source/blender/blenkernel/intern/collection.c index 2d172f23428..8e50b9e9534 100644 --- a/source/blender/blenkernel/intern/collection.c +++ b/source/blender/blenkernel/intern/collection.c @@ -597,7 +597,7 @@ static Collection *collection_duplicate_recursive(Main *bmain, } else if (collection_old->id.newid == NULL) { collection_new = (Collection *)BKE_id_copy_for_duplicate( - bmain, (ID *)collection_old, duplicate_flags); + bmain, (ID *)collection_old, duplicate_flags, LIB_ID_COPY_DEFAULT); if (collection_new == collection_old) { return collection_new; diff --git a/source/blender/blenkernel/intern/colortools.c b/source/blender/blenkernel/intern/colortools.c index f2c2e552a9f..62b817487fc 100644 --- a/source/blender/blenkernel/intern/colortools.c +++ 
b/source/blender/blenkernel/intern/colortools.c @@ -1212,6 +1212,20 @@ void BKE_curvemapping_init(CurveMapping *cumap) } } +void BKE_curvemapping_table_F(const CurveMapping *cumap, float **array, int *size) +{ + int a; + + *size = CM_TABLE + 1; + *array = MEM_callocN(sizeof(float) * (*size) * 4, "CurveMapping"); + + for (a = 0; a < *size; a++) { + if (cumap->cm[0].table) { + (*array)[a * 4 + 0] = cumap->cm[0].table[a].y; + } + } +} + void BKE_curvemapping_table_RGBA(const CurveMapping *cumap, float **array, int *size) { int a; diff --git a/source/blender/blenkernel/intern/constraint.c b/source/blender/blenkernel/intern/constraint.c index b9b15eba6a4..b2b03d28483 100644 --- a/source/blender/blenkernel/intern/constraint.c +++ b/source/blender/blenkernel/intern/constraint.c @@ -3499,7 +3499,7 @@ static void stretchto_new_data(void *cdata) bStretchToConstraint *data = (bStretchToConstraint *)cdata; data->volmode = 0; - data->plane = 0; + data->plane = SWING_Y; data->orglength = 0.0; data->bulge = 1.0; data->bulge_max = 1.0f; diff --git a/source/blender/blenkernel/intern/curve.c b/source/blender/blenkernel/intern/curve.c index b0d196b2bb0..0dcfea78ca5 100644 --- a/source/blender/blenkernel/intern/curve.c +++ b/source/blender/blenkernel/intern/curve.c @@ -404,6 +404,7 @@ void BKE_curve_init(Curve *cu, const short curve_type) } else if (cu->type == OB_SURF) { cu->flag |= CU_3D; + cu->resolu = 4; cu->resolv = 4; } cu->bevel_profile = NULL; diff --git a/source/blender/blenkernel/intern/customdata.c b/source/blender/blenkernel/intern/customdata.c index ad2d5d267d5..3bb02e1856b 100644 --- a/source/blender/blenkernel/intern/customdata.c +++ b/source/blender/blenkernel/intern/customdata.c @@ -1856,6 +1856,8 @@ static const LayerTypeInfo LAYERTYPEINFO[CD_NUMTYPES] = { NULL, NULL, NULL}, + /* 51: CD_HAIRLENGTH */ + {sizeof(float), "float", 1, NULL, NULL, NULL, NULL, NULL, NULL}, }; static const char *LAYERTYPENAMES[CD_NUMTYPES] = { @@ -1912,6 +1914,7 @@ static const char 
*LAYERTYPENAMES[CD_NUMTYPES] = { "CDPropFloat3", "CDPropFloat2", "CDPropBoolean", + "CDHairLength", }; const CustomData_MeshMasks CD_MASK_BAREMESH = { diff --git a/source/blender/blenkernel/intern/displist.cc b/source/blender/blenkernel/intern/displist.cc index e756daa1156..0776f3b9a68 100644 --- a/source/blender/blenkernel/intern/displist.cc +++ b/source/blender/blenkernel/intern/displist.cc @@ -261,7 +261,7 @@ bool BKE_displist_surfindex_get( return true; } -/* ****************** make displists ********************* */ +/* ****************** Make #DispList ********************* */ #ifdef __INTEL_COMPILER /* ICC with the optimization -02 causes crashes. */ # pragma intel optimization_level 1 @@ -1540,23 +1540,6 @@ void BKE_displist_make_curveTypes(Depsgraph *depsgraph, boundbox_displist_object(ob); } -void BKE_displist_make_curveTypes_forRender( - Depsgraph *depsgraph, const Scene *scene, Object *ob, ListBase *r_dispbase, Mesh **r_final) -{ - if (ob->runtime.curve_cache == nullptr) { - ob->runtime.curve_cache = (CurveCache *)MEM_callocN(sizeof(CurveCache), __func__); - } - - if (ob->type == OB_SURF) { - evaluate_surface_object(depsgraph, scene, ob, true, r_dispbase, r_final); - } - else { - GeometrySet geometry_set = evaluate_curve_type_object(depsgraph, scene, ob, true, r_dispbase); - MeshComponent &mesh_component = geometry_set.get_component_for_write<MeshComponent>(); - *r_final = mesh_component.release(); - } -} - void BKE_displist_minmax(const ListBase *dispbase, float min[3], float max[3]) { bool doit = false; diff --git a/source/blender/blenkernel/intern/dynamicpaint.c b/source/blender/blenkernel/intern/dynamicpaint.c index d75b3259148..9083c507160 100644 --- a/source/blender/blenkernel/intern/dynamicpaint.c +++ b/source/blender/blenkernel/intern/dynamicpaint.c @@ -317,7 +317,7 @@ static bool setError(DynamicPaintCanvasSettings *canvas, const char *string) static int dynamicPaint_surfaceNumOfPoints(DynamicPaintSurface *surface) { if (surface->format == 
MOD_DPAINT_SURFACE_F_PTEX) { - return 0; /* not supported atm */ + return 0; /* Not supported at the moment. */ } if (surface->format == MOD_DPAINT_SURFACE_F_VERTEX) { const Mesh *canvas_mesh = dynamicPaint_canvas_mesh_get(surface->canvas); @@ -1231,7 +1231,7 @@ void dynamicPaint_Modifier_copy(const struct DynamicPaintModifierData *pmd, /* copy existing surfaces */ for (surface = pmd->canvas->surfaces.first; surface; surface = surface->next) { DynamicPaintSurface *t_surface = dynamicPaint_createNewSurface(tpmd->canvas, NULL); - if (flag & LIB_ID_CREATE_NO_MAIN) { + if (flag & LIB_ID_COPY_SET_COPIED_ON_WRITE) { /* TODO(sergey): Consider passing some tips to the surface * creation to avoid this allocate-and-free cache behavior. */ BKE_ptcache_free_list(&t_surface->ptcaches); diff --git a/source/blender/blenkernel/intern/fluid.c b/source/blender/blenkernel/intern/fluid.c index 1324b37f39c..e272b71acb8 100644 --- a/source/blender/blenkernel/intern/fluid.c +++ b/source/blender/blenkernel/intern/fluid.c @@ -5094,7 +5094,7 @@ void BKE_fluid_modifier_copy(const struct FluidModifierData *fmd, /* pointcache options */ BKE_ptcache_free_list(&(tfds->ptcaches[0])); - if (flag & LIB_ID_CREATE_NO_MAIN) { + if (flag & LIB_ID_COPY_SET_COPIED_ON_WRITE) { /* Share the cache with the original object's modifier. 
*/ tfmd->modifier.flag |= eModifierFlag_SharedCaches; tfds->point_cache[0] = fds->point_cache[0]; diff --git a/source/blender/blenkernel/intern/font.c b/source/blender/blenkernel/intern/font.c index aa13f86523a..0e159418724 100644 --- a/source/blender/blenkernel/intern/font.c +++ b/source/blender/blenkernel/intern/font.c @@ -34,6 +34,7 @@ #include "BLI_ghash.h" #include "BLI_listbase.h" #include "BLI_math.h" +#include "BLI_math_base_safe.h" #include "BLI_path_util.h" #include "BLI_string.h" #include "BLI_string_utf8.h" @@ -490,15 +491,15 @@ static void build_underline(Curve *cu, mul_v2_fl(bp[3].vec, font_size); } -static void buildchar(Curve *cu, - ListBase *nubase, - unsigned int character, - CharInfo *info, - float ofsx, - float ofsy, - float rot, - int charidx, - const float fsize) +void BKE_vfont_build_char(Curve *cu, + ListBase *nubase, + unsigned int character, + CharInfo *info, + float ofsx, + float ofsy, + float rot, + int charidx, + const float fsize) { VFontData *vfd = vfont_get_data(which_vfont(cu, info)); if (!vfd) { @@ -794,8 +795,8 @@ static bool vfont_to_curve(Object *ob, bool ok = false; const float font_size = cu->fsize * iter_data->scale_to_fit; const bool word_wrap = iter_data->word_wrap; - const float xof_scale = cu->xof / font_size; - const float yof_scale = cu->yof / font_size; + const float xof_scale = safe_divide(cu->xof, font_size); + const float yof_scale = safe_divide(cu->yof, font_size); int last_line = -1; /* Length of the text disregarding \n breaks. 
*/ float current_line_length = 0.0f; @@ -889,7 +890,7 @@ static bool vfont_to_curve(Object *ob, linedist = cu->linedist; curbox = 0; - textbox_scale(&tb_scale, &cu->tb[curbox], 1.0f / font_size); + textbox_scale(&tb_scale, &cu->tb[curbox], safe_divide(1.0f, font_size)); use_textbox = (tb_scale.w != 0.0f); xof = MARGIN_X_MIN; @@ -1525,7 +1526,7 @@ static bool vfont_to_curve(Object *ob, } /* We do not want to see any character for \n or \r */ if (cha != '\n') { - buildchar(cu, r_nubase, cha, info, ct->xof, ct->yof, ct->rot, i, font_size); + BKE_vfont_build_char(cu, r_nubase, cha, info, ct->xof, ct->yof, ct->rot, i, font_size); } if ((info->flag & CU_CHINFO_UNDERLINE) && (cha != '\n')) { diff --git a/source/blender/blenkernel/intern/geometry_component_curve.cc b/source/blender/blenkernel/intern/geometry_component_curve.cc index 7d0537178ef..73c628d3f0f 100644 --- a/source/blender/blenkernel/intern/geometry_component_curve.cc +++ b/source/blender/blenkernel/intern/geometry_component_curve.cc @@ -535,6 +535,9 @@ static GVMutableArrayPtr make_cyclic_write_attribute(CurveEval &curve) * array implementations try to make it workable in common situations. * \{ */ +/** + * Individual spans in \a data may be empty if that spline contains no data for the attribute. 
+ */ template<typename T> static void point_attribute_materialize(Span<Span<T>> data, Span<int> offsets, @@ -546,7 +549,15 @@ static void point_attribute_materialize(Span<Span<T>> data, for (const int spline_index : data.index_range()) { const int offset = offsets[spline_index]; const int next_offset = offsets[spline_index + 1]; - r_span.slice(offset, next_offset - offset).copy_from(data[spline_index]); + + Span<T> src = data[spline_index]; + MutableSpan<T> dst = r_span.slice(offset, next_offset - offset); + if (src.is_empty()) { + dst.fill(T()); + } + else { + dst.copy_from(src); + } } } else { @@ -557,11 +568,20 @@ static void point_attribute_materialize(Span<Span<T>> data, } const int index_in_spline = dst_index - offsets[spline_index]; - r_span[dst_index] = data[spline_index][index_in_spline]; + Span<T> src = data[spline_index]; + if (src.is_empty()) { + r_span[dst_index] = T(); + } + else { + r_span[dst_index] = src[index_in_spline]; + } } } } +/** + * Individual spans in \a data may be empty if that spline contains no data for the attribute. 
+ */ template<typename T> static void point_attribute_materialize_to_uninitialized(Span<Span<T>> data, Span<int> offsets, @@ -574,7 +594,14 @@ static void point_attribute_materialize_to_uninitialized(Span<Span<T>> data, for (const int spline_index : data.index_range()) { const int offset = offsets[spline_index]; const int next_offset = offsets[spline_index + 1]; - uninitialized_copy_n(data[spline_index].data(), next_offset - offset, dst + offset); + + Span<T> src = data[spline_index]; + if (src.is_empty()) { + uninitialized_fill_n(dst + offset, next_offset - offset, T()); + } + else { + uninitialized_copy_n(src.data(), next_offset - offset, dst + offset); + } } } else { @@ -585,7 +612,13 @@ static void point_attribute_materialize_to_uninitialized(Span<Span<T>> data, } const int index_in_spline = dst_index - offsets[spline_index]; - new (dst + dst_index) T(data[spline_index][index_in_spline]); + Span<T> src = data[spline_index]; + if (src.is_empty()) { + new (dst + dst_index) T(); + } + else { + new (dst + dst_index) T(src[index_in_spline]); + } } } } @@ -769,6 +802,169 @@ class VMutableArray_For_SplinePosition final : public VMutableArray<float3> { } }; +class VArray_For_BezierHandle final : public VArray<float3> { + private: + Span<SplinePtr> splines_; + Array<int> offsets_; + bool is_right_; + + public: + VArray_For_BezierHandle(Span<SplinePtr> splines, Array<int> offsets, const bool is_right) + : VArray<float3>(offsets.last()), + splines_(std::move(splines)), + offsets_(std::move(offsets)), + is_right_(is_right) + { + } + + static float3 get_internal(const int64_t index, + Span<SplinePtr> splines, + Span<int> offsets, + const bool is_right) + { + const PointIndices indices = lookup_point_indices(offsets, index); + const Spline &spline = *splines[indices.spline_index]; + if (spline.type() == Spline::Type::Bezier) { + const BezierSpline &bezier_spline = static_cast<const BezierSpline &>(spline); + return is_right ? 
bezier_spline.handle_positions_right()[indices.point_index] : + bezier_spline.handle_positions_left()[indices.point_index]; + } + return float3(0); + } + + float3 get_impl(const int64_t index) const final + { + return get_internal(index, splines_, offsets_, is_right_); + } + + /** + * Utility so we can pass handle positions to the materialize functions above. + * + * \note This relies on the ability of the materialize implementations to + * handle empty spans, since only Bezier splines have handles. + */ + static Array<Span<float3>> get_handle_spans(Span<SplinePtr> splines, const bool is_right) + { + Array<Span<float3>> spans(splines.size()); + for (const int i : spans.index_range()) { + if (splines[i]->type() == Spline::Type::Bezier) { + BezierSpline &bezier_spline = static_cast<BezierSpline &>(*splines[i]); + spans[i] = is_right ? bezier_spline.handle_positions_right() : + bezier_spline.handle_positions_left(); + } + else { + spans[i] = {}; + } + } + return spans; + } + + static void materialize_internal(const IndexMask mask, + Span<SplinePtr> splines, + Span<int> offsets, + const bool is_right, + MutableSpan<float3> r_span) + { + Array<Span<float3>> spans = get_handle_spans(splines, is_right); + point_attribute_materialize(spans.as_span(), offsets, mask, r_span); + } + + static void materialize_to_uninitialized_internal(const IndexMask mask, + Span<SplinePtr> splines, + Span<int> offsets, + const bool is_right, + MutableSpan<float3> r_span) + { + Array<Span<float3>> spans = get_handle_spans(splines, is_right); + point_attribute_materialize_to_uninitialized(spans.as_span(), offsets, mask, r_span); + } + + void materialize_impl(const IndexMask mask, MutableSpan<float3> r_span) const final + { + materialize_internal(mask, splines_, offsets_, is_right_, r_span); + } + + void materialize_to_uninitialized_impl(const IndexMask mask, + MutableSpan<float3> r_span) const final + { + materialize_to_uninitialized_internal(mask, splines_, offsets_, is_right_, r_span); + } 
+}; + +class VMutableArray_For_BezierHandles final : public VMutableArray<float3> { + private: + MutableSpan<SplinePtr> splines_; + Array<int> offsets_; + bool is_right_; + + public: + VMutableArray_For_BezierHandles(MutableSpan<SplinePtr> splines, + Array<int> offsets, + const bool is_right) + : VMutableArray<float3>(offsets.last()), + splines_(splines), + offsets_(std::move(offsets)), + is_right_(is_right) + { + } + + float3 get_impl(const int64_t index) const final + { + return VArray_For_BezierHandle::get_internal(index, splines_, offsets_, is_right_); + } + + void set_impl(const int64_t index, float3 value) final + { + const PointIndices indices = lookup_point_indices(offsets_, index); + Spline &spline = *splines_[indices.spline_index]; + if (spline.type() == Spline::Type::Bezier) { + BezierSpline &bezier_spline = static_cast<BezierSpline &>(spline); + if (is_right_) { + bezier_spline.set_handle_position_right(indices.point_index, value); + } + else { + bezier_spline.set_handle_position_left(indices.point_index, value); + } + bezier_spline.mark_cache_invalid(); + } + } + + void set_all_impl(Span<float3> src) final + { + for (const int spline_index : splines_.index_range()) { + Spline &spline = *splines_[spline_index]; + if (spline.type() == Spline::Type::Bezier) { + const int offset = offsets_[spline_index]; + + BezierSpline &bezier_spline = static_cast<BezierSpline &>(spline); + if (is_right_) { + for (const int i : IndexRange(bezier_spline.size())) { + bezier_spline.set_handle_position_right(i, src[offset + i]); + } + } + else { + for (const int i : IndexRange(bezier_spline.size())) { + bezier_spline.set_handle_position_left(i, src[offset + i]); + } + } + bezier_spline.mark_cache_invalid(); + } + } + } + + void materialize_impl(const IndexMask mask, MutableSpan<float3> r_span) const final + { + VArray_For_BezierHandle::materialize_internal(mask, splines_, offsets_, is_right_, r_span); + } + + void materialize_to_uninitialized_impl(const IndexMask mask, + 
MutableSpan<float3> r_span) const final + { + VArray_For_BezierHandle::materialize_to_uninitialized_internal( + mask, splines_, offsets_, is_right_, r_span); + } +}; + /** * Provider for any builtin control point attribute that doesn't need * special handling like access to other arrays in the spline. @@ -906,6 +1102,78 @@ class PositionAttributeProvider final : public BuiltinPointAttributeProvider<flo } }; +class BezierHandleAttributeProvider : public BuiltinAttributeProvider { + private: + bool is_right_; + + public: + BezierHandleAttributeProvider(const bool is_right) + : BuiltinAttributeProvider(is_right ? "handle_right" : "handle_left", + ATTR_DOMAIN_POINT, + CD_PROP_FLOAT3, + BuiltinAttributeProvider::NonCreatable, + BuiltinAttributeProvider::Writable, + BuiltinAttributeProvider::NonDeletable), + is_right_(is_right) + { + } + + GVArrayPtr try_get_for_read(const GeometryComponent &component) const override + { + const CurveEval *curve = get_curve_from_component_for_read(component); + if (curve == nullptr) { + return {}; + } + + if (!curve->has_spline_with_type(Spline::Type::Bezier)) { + return {}; + } + + Array<int> offsets = curve->control_point_offsets(); + return std::make_unique<fn::GVArray_For_EmbeddedVArray<float3, VArray_For_BezierHandle>>( + offsets.last(), curve->splines(), std::move(offsets), is_right_); + } + + GVMutableArrayPtr try_get_for_write(GeometryComponent &component) const override + { + CurveEval *curve = get_curve_from_component_for_write(component); + if (curve == nullptr) { + return {}; + } + + if (!curve->has_spline_with_type(Spline::Type::Bezier)) { + return {}; + } + + Array<int> offsets = curve->control_point_offsets(); + return std::make_unique< + fn::GVMutableArray_For_EmbeddedVMutableArray<float3, VMutableArray_For_BezierHandles>>( + offsets.last(), curve->splines(), std::move(offsets), is_right_); + } + + bool try_delete(GeometryComponent &UNUSED(component)) const final + { + return false; + } + + bool 
try_create(GeometryComponent &UNUSED(component), + const AttributeInit &UNUSED(initializer)) const final + { + return false; + } + + bool exists(const GeometryComponent &component) const final + { + const CurveEval *curve = get_curve_from_component_for_read(component); + if (curve == nullptr) { + return false; + } + + return curve->has_spline_with_type(Spline::Type::Bezier) && + component.attribute_domain_size(ATTR_DOMAIN_POINT) != 0; + } +}; + /** \} */ /* -------------------------------------------------------------------- */ @@ -1196,6 +1464,8 @@ static ComponentAttributeProviders create_attribute_providers_for_curve() spline_custom_data_access); static PositionAttributeProvider position; + static BezierHandleAttributeProvider handles_start(false); + static BezierHandleAttributeProvider handles_end(true); static BuiltinPointAttributeProvider<float> radius( "radius", @@ -1213,8 +1483,9 @@ static ComponentAttributeProviders create_attribute_providers_for_curve() static DynamicPointAttributeProvider point_custom_data; - return ComponentAttributeProviders({&position, &radius, &tilt, &resolution, &cyclic}, - {&spline_custom_data, &point_custom_data}); + return ComponentAttributeProviders( + {&position, &radius, &tilt, &handles_start, &handles_end, &resolution, &cyclic}, + {&spline_custom_data, &point_custom_data}); } } // namespace blender::bke diff --git a/source/blender/blenkernel/intern/geometry_component_instances.cc b/source/blender/blenkernel/intern/geometry_component_instances.cc index 9479d012cb8..4204d62e1a7 100644 --- a/source/blender/blenkernel/intern/geometry_component_instances.cc +++ b/source/blender/blenkernel/intern/geometry_component_instances.cc @@ -14,11 +14,14 @@ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ +#include <mutex> + #include "BLI_float4x4.hh" #include "BLI_map.hh" #include "BLI_rand.hh" #include "BLI_set.hh" #include "BLI_span.hh" +#include "BLI_task.hh" #include "BLI_vector.hh" #include "DNA_collection_types.h" @@ -122,44 +125,14 @@ blender::Span<int> InstancesComponent::instance_ids() const } /** - * If references have a collection or object type, convert them into geometry instances. This - * will join geometry components from nested instances if necessary. After that, the geometry - * sets can be edited. - */ -void InstancesComponent::ensure_geometry_instances() -{ - VectorSet<InstanceReference> new_references; - new_references.reserve(references_.size()); - for (const InstanceReference &reference : references_) { - if (reference.type() == InstanceReference::Type::Object) { - GeometrySet geometry_set; - InstancesComponent &instances = geometry_set.get_component_for_write<InstancesComponent>(); - const int handle = instances.add_reference(reference.object()); - instances.add_instance(handle, float4x4::identity()); - new_references.add_new(geometry_set); - } - else if (reference.type() == InstanceReference::Type::Collection) { - GeometrySet geometry_set; - InstancesComponent &instances = geometry_set.get_component_for_write<InstancesComponent>(); - const int handle = instances.add_reference(reference.collection()); - instances.add_instance(handle, float4x4::identity()); - new_references.add_new(geometry_set); - } - else { - new_references.add_new(reference); - } - } - references_ = std::move(new_references); -} - -/** * With write access to the instances component, the data in the instanced geometry sets can be * changed. This is a function on the component rather than each reference to ensure `const` * correctness for that reason. */ GeometrySet &InstancesComponent::geometry_set_from_reference(const int reference_index) { - /* If this assert fails, it means #ensure_geometry_instances must be called first. 
*/ + /* If this assert fails, it means #ensure_geometry_instances must be called first or that the + * reference can't be converted to a geometry set. */ BLI_assert(references_[reference_index].type() == InstanceReference::Type::GeometrySet); /* The const cast is okay because the instance's hash in the set @@ -182,6 +155,86 @@ blender::Span<InstanceReference> InstancesComponent::references() const return references_; } +void InstancesComponent::remove_unused_references() +{ + using namespace blender; + using namespace blender::bke; + + const int tot_instances = this->instances_amount(); + const int tot_references_before = references_.size(); + + if (tot_instances == 0) { + /* If there are no instances, no reference is needed. */ + references_.clear(); + return; + } + if (tot_references_before == 1) { + /* There is only one reference and at least one instance. So the only existing reference is + * used. Nothing to do here. */ + return; + } + + Array<bool> usage_by_handle(tot_references_before, false); + std::mutex mutex; + + /* Loop over all instances to see which references are used. */ + threading::parallel_for(IndexRange(tot_instances), 1000, [&](IndexRange range) { + /* Use local counter to avoid lock contention. */ + Array<bool> local_usage_by_handle(tot_references_before, false); + + for (const int i : range) { + const int handle = instance_reference_handles_[i]; + BLI_assert(handle >= 0 && handle < tot_references_before); + local_usage_by_handle[handle] = true; + } + + std::lock_guard lock{mutex}; + for (const int i : IndexRange(tot_references_before)) { + usage_by_handle[i] |= local_usage_by_handle[i]; + } + }); + + if (!usage_by_handle.as_span().contains(false)) { + /* All references are used. */ + return; + } + + /* Create new references and a mapping for the handles. 
*/ + Vector<int> handle_mapping; + VectorSet<InstanceReference> new_references; + int next_new_handle = 0; + bool handles_have_to_be_updated = false; + for (const int old_handle : IndexRange(tot_references_before)) { + if (!usage_by_handle[old_handle]) { + /* Add some dummy value. It won't be read again. */ + handle_mapping.append(-1); + } + else { + const InstanceReference &reference = references_[old_handle]; + handle_mapping.append(next_new_handle); + new_references.add_new(reference); + if (old_handle != next_new_handle) { + handles_have_to_be_updated = true; + } + next_new_handle++; + } + } + references_ = new_references; + + if (!handles_have_to_be_updated) { + /* All remaining handles are the same as before, so they don't have to be updated. This happens + * when unused handles are only at the end. */ + return; + } + + /* Update handles of instances. */ + threading::parallel_for(IndexRange(tot_instances), 1000, [&](IndexRange range) { + for (const int i : range) { + instance_reference_handles_[i] = handle_mapping[instance_reference_handles_[i]]; + } + }); +} + int InstancesComponent::instances_amount() const { return instance_transforms_.size(); diff --git a/source/blender/blenkernel/intern/geometry_set.cc b/source/blender/blenkernel/intern/geometry_set.cc index e717d289894..0aac6ae3adf 100644 --- a/source/blender/blenkernel/intern/geometry_set.cc +++ b/source/blender/blenkernel/intern/geometry_set.cc @@ -15,6 +15,7 @@ */ #include "BLI_map.hh" +#include "BLI_task.hh" #include "BKE_attribute.h" #include "BKE_attribute_access.hh" @@ -151,6 +152,19 @@ void GeometrySet::remove(const GeometryComponentType component_type) components_.remove(component_type); } +/** + * Remove all geometry components with types that are not in the provided list. 
+ */ +void GeometrySet::keep_only(const blender::Span<GeometryComponentType> component_types) +{ + for (auto it = components_.keys().begin(); it != components_.keys().end(); ++it) { + const GeometryComponentType type = *it; + if (!component_types.contains(type)) { + components_.remove(it); + } + } +} + void GeometrySet::add(const GeometryComponent &component) { BLI_assert(!components_.contains(component.type())); @@ -291,6 +305,29 @@ bool GeometrySet::has_curve() const return component != nullptr && component->has_curve(); } +/* Returns true when the geometry set has any data that is not an instance. */ +bool GeometrySet::has_realized_data() const +{ + if (components_.is_empty()) { + return false; + } + if (components_.size() > 1) { + return true; + } + /* Check if the only component is an #InstancesComponent. */ + return this->get_component_for_read<InstancesComponent>() == nullptr; +} + +/* Return true if the geometry set has any component that isn't empty. */ +bool GeometrySet::is_empty() const +{ + if (components_.is_empty()) { + return true; + } + return !(this->has_mesh() || this->has_curve() || this->has_pointcloud() || + this->has_instances()); +} + /* Create a new geometry set that only contains the given mesh. 
*/ GeometrySet GeometrySet::create_with_mesh(Mesh *mesh, GeometryOwnershipType ownership) { @@ -375,6 +412,108 @@ CurveEval *GeometrySet::get_curve_for_write() return component.get_for_write(); } +void GeometrySet::attribute_foreach(const Span<GeometryComponentType> component_types, + const bool include_instances, + const AttributeForeachCallback callback) const +{ + using namespace blender; + using namespace blender::bke; + for (const GeometryComponentType component_type : component_types) { + if (!this->has(component_type)) { + continue; + } + const GeometryComponent &component = *this->get_component_for_read(component_type); + component.attribute_foreach( + [&](const AttributeIDRef &attribute_id, const AttributeMetaData &meta_data) { + callback(attribute_id, meta_data, component); + return true; + }); + } + if (include_instances && this->has_instances()) { + const InstancesComponent &instances = *this->get_component_for_read<InstancesComponent>(); + instances.foreach_referenced_geometry([&](const GeometrySet &instance_geometry_set) { + instance_geometry_set.attribute_foreach(component_types, include_instances, callback); + }); + } +} + +void GeometrySet::gather_attributes_for_propagation( + const Span<GeometryComponentType> component_types, + const GeometryComponentType dst_component_type, + bool include_instances, + blender::Map<blender::bke::AttributeIDRef, AttributeKind> &r_attributes) const +{ + using namespace blender; + using namespace blender::bke; + /* Only needed right now to check if an attribute is built-in on this component type. + * TODO: Get rid of the dummy component. 
*/ + const GeometryComponent *dummy_component = GeometryComponent::create(dst_component_type); + this->attribute_foreach( + component_types, + include_instances, + [&](const AttributeIDRef &attribute_id, + const AttributeMetaData &meta_data, + const GeometryComponent &component) { + if (component.attribute_is_builtin(attribute_id)) { + if (!dummy_component->attribute_is_builtin(attribute_id)) { + /* Don't propagate built-in attributes that are not built-in on the destination + * component. */ + return; + } + } + if (attribute_id.is_anonymous()) { + if (!BKE_anonymous_attribute_id_has_strong_references(&attribute_id.anonymous_id())) { + /* Don't propagate anonymous attributes that are not used anymore. */ + return; + } + } + auto add_info = [&](AttributeKind *attribute_kind) { + attribute_kind->domain = meta_data.domain; + attribute_kind->data_type = meta_data.data_type; + }; + auto modify_info = [&](AttributeKind *attribute_kind) { + attribute_kind->domain = bke::attribute_domain_highest_priority( + {attribute_kind->domain, meta_data.domain}); + attribute_kind->data_type = bke::attribute_data_type_highest_complexity( + {attribute_kind->data_type, meta_data.data_type}); + }; + r_attributes.add_or_modify(attribute_id, add_info, modify_info); + }); + delete dummy_component; +} + +static void gather_mutable_geometry_sets(GeometrySet &geometry_set, + Vector<GeometrySet *> &r_geometry_sets) +{ + r_geometry_sets.append(&geometry_set); + if (!geometry_set.has_instances()) { + return; + } + /* In the future this can be improved by deduplicating instance references across different + * instances. 
*/ + InstancesComponent &instances_component = + geometry_set.get_component_for_write<InstancesComponent>(); + instances_component.ensure_geometry_instances(); + for (const int handle : instances_component.references().index_range()) { + if (instances_component.references()[handle].type() == InstanceReference::Type::GeometrySet) { + GeometrySet &instance_geometry = instances_component.geometry_set_from_reference(handle); + gather_mutable_geometry_sets(instance_geometry, r_geometry_sets); + } + } +} + +/** + * Modify every (recursive) instance separately. This is often more efficient than realizing all + * instances just to change the same thing on all of them. + */ +void GeometrySet::modify_geometry_sets(ForeachSubGeometryCallback callback) +{ + Vector<GeometrySet *> geometry_sets; + gather_mutable_geometry_sets(*this, geometry_sets); + blender::threading::parallel_for_each( + geometry_sets, [&](GeometrySet *geometry_set) { callback(*geometry_set); }); +} + /** \} */ /* -------------------------------------------------------------------- */ diff --git a/source/blender/blenkernel/intern/geometry_set_instances.cc b/source/blender/blenkernel/intern/geometry_set_instances.cc index 9dca2c2907e..77348c3d22c 100644 --- a/source/blender/blenkernel/intern/geometry_set_instances.cc +++ b/source/blender/blenkernel/intern/geometry_set_instances.cc @@ -14,6 +14,7 @@ * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ +#include "BKE_collection.h" #include "BKE_geometry_set_instances.hh" #include "BKE_material.h" #include "BKE_mesh.h" @@ -23,6 +24,7 @@ #include "BKE_spline.hh" #include "DNA_collection_types.h" +#include "DNA_layer_types.h" #include "DNA_mesh_types.h" #include "DNA_meshdata_types.h" #include "DNA_object_types.h" @@ -187,134 +189,6 @@ void geometry_set_gather_instances(const GeometrySet &geometry_set, geometry_set_collect_recursive(geometry_set, unit_transform, r_instance_groups); } -static bool collection_instance_attribute_foreach(const Collection &collection, - const AttributeForeachCallback callback, - const int limit, - int &count); - -static bool instances_attribute_foreach_recursive(const GeometrySet &geometry_set, - const AttributeForeachCallback callback, - const int limit, - int &count); - -static bool object_instance_attribute_foreach(const Object &object, - const AttributeForeachCallback callback, - const int limit, - int &count) -{ - GeometrySet instance_geometry_set = object_get_geometry_set_for_read(object); - if (!instances_attribute_foreach_recursive(instance_geometry_set, callback, limit, count)) { - return false; - } - - if (object.type == OB_EMPTY) { - const Collection *collection_instance = object.instance_collection; - if (collection_instance != nullptr) { - if (!collection_instance_attribute_foreach(*collection_instance, callback, limit, count)) { - return false; - } - } - } - return true; -} - -static bool collection_instance_attribute_foreach(const Collection &collection, - const AttributeForeachCallback callback, - const int limit, - int &count) -{ - LISTBASE_FOREACH (const CollectionObject *, collection_object, &collection.gobject) { - BLI_assert(collection_object->ob != nullptr); - const Object &object = *collection_object->ob; - if (!object_instance_attribute_foreach(object, callback, limit, count)) { - return false; - } - } - LISTBASE_FOREACH (const CollectionChild *, collection_child, &collection.children) { - 
BLI_assert(collection_child->collection != nullptr); - const Collection &collection = *collection_child->collection; - if (!collection_instance_attribute_foreach(collection, callback, limit, count)) { - return false; - } - } - return true; -} - -/** - * \return True if the recursive iteration should continue, false if the limit is reached or the - * callback has returned false indicating it should stop. - */ -static bool instances_attribute_foreach_recursive(const GeometrySet &geometry_set, - const AttributeForeachCallback callback, - const int limit, - int &count) -{ - for (const GeometryComponent *component : geometry_set.get_components_for_read()) { - if (!component->attribute_foreach(callback)) { - return false; - } - } - - /* Now that this geometry set is visited, increase the count and check with the limit. */ - if (limit > 0 && count++ > limit) { - return false; - } - - const InstancesComponent *instances_component = - geometry_set.get_component_for_read<InstancesComponent>(); - if (instances_component == nullptr) { - return true; - } - - for (const InstanceReference &reference : instances_component->references()) { - switch (reference.type()) { - case InstanceReference::Type::Object: { - const Object &object = reference.object(); - if (!object_instance_attribute_foreach(object, callback, limit, count)) { - return false; - } - break; - } - case InstanceReference::Type::Collection: { - const Collection &collection = reference.collection(); - if (!collection_instance_attribute_foreach(collection, callback, limit, count)) { - return false; - } - break; - } - case InstanceReference::Type::GeometrySet: { - const GeometrySet &geometry_set = reference.geometry_set(); - if (!instances_attribute_foreach_recursive(geometry_set, callback, limit, count)) { - return false; - } - break; - } - case InstanceReference::Type::None: { - break; - } - } - } - - return true; -} - -/** - * Call the callback on all of this geometry set's components, including geometry sets from - * 
instances and recursive instances. This is necessary to access available attributes without - * making all of the set's geometry real. - * - * \param limit: The total number of geometry sets to visit before returning early. This is used - * to avoid looking through too many geometry sets recursively, as an explicit tradeoff in favor - * of performance at the cost of visiting every unique attribute. - */ -void geometry_set_instances_attribute_foreach(const GeometrySet &geometry_set, - const AttributeForeachCallback callback, - const int limit) -{ - int count = 0; - instances_attribute_foreach_recursive(geometry_set, callback, limit, count); -} - void geometry_set_gather_instances_attribute_info(Span<GeometryInstanceGroup> set_groups, Span<GeometryComponentType> component_types, const Set<std::string> &ignored_attributes, @@ -700,7 +574,7 @@ static void join_instance_groups_curve(Span<GeometryInstanceGroup> set_groups, G geometry_set_gather_instances_attribute_info( set_groups, {GEO_COMPONENT_TYPE_CURVE}, - {"position", "radius", "tilt", "cyclic", "resolution"}, + {"position", "radius", "tilt", "handle_left", "handle_right", "cyclic", "resolution"}, attributes); join_attributes(set_groups, {GEO_COMPONENT_TYPE_CURVE}, @@ -745,3 +619,91 @@ GeometrySet geometry_set_realize_instances(const GeometrySet &geometry_set) } } // namespace blender::bke + +void InstancesComponent::foreach_referenced_geometry( + blender::FunctionRef<void(const GeometrySet &geometry_set)> callback) const +{ + using namespace blender::bke; + for (const InstanceReference &reference : references_) { + switch (reference.type()) { + case InstanceReference::Type::Object: { + const Object &object = reference.object(); + const GeometrySet object_geometry_set = object_get_geometry_set_for_read(object); + callback(object_geometry_set); + break; + } + case InstanceReference::Type::Collection: { + Collection &collection = reference.collection(); + FOREACH_COLLECTION_OBJECT_RECURSIVE_BEGIN (&collection, 
object) { + const GeometrySet object_geometry_set = object_get_geometry_set_for_read(*object); + callback(object_geometry_set); + } + FOREACH_COLLECTION_OBJECT_RECURSIVE_END; + break; + } + case InstanceReference::Type::GeometrySet: { + const GeometrySet &instance_geometry_set = reference.geometry_set(); + callback(instance_geometry_set); + break; + } + case InstanceReference::Type::None: { + break; + } + } + } +} + +/** + * If references have a collection or object type, convert them into geometry instances + * recursively. After that, the geometry sets can be edited. There may still be instances of other + * types if they can't be converted to geometry sets. + */ +void InstancesComponent::ensure_geometry_instances() +{ + using namespace blender; + using namespace blender::bke; + VectorSet<InstanceReference> new_references; + new_references.reserve(references_.size()); + for (const InstanceReference &reference : references_) { + switch (reference.type()) { + case InstanceReference::Type::None: + case InstanceReference::Type::GeometrySet: { + /* Those references can stay as they were. */ + new_references.add_new(reference); + break; + } + case InstanceReference::Type::Object: { + /* Create a new reference that contains the geometry set of the object. We may want to + * treat e.g. lamps and similar object types separately here. */ + const Object &object = reference.object(); + GeometrySet object_geometry_set = object_get_geometry_set_for_read(object); + if (object_geometry_set.has_instances()) { + InstancesComponent &component = + object_geometry_set.get_component_for_write<InstancesComponent>(); + component.ensure_geometry_instances(); + } + new_references.add_new(std::move(object_geometry_set)); + break; + } + case InstanceReference::Type::Collection: { + /* Create a new reference that contains a geometry set that contains all objects from the + * collection as instances. 
*/ + GeometrySet collection_geometry_set; + InstancesComponent &component = + collection_geometry_set.get_component_for_write<InstancesComponent>(); + Collection &collection = reference.collection(); + FOREACH_COLLECTION_OBJECT_RECURSIVE_BEGIN (&collection, object) { + const int handle = component.add_reference(*object); + component.add_instance(handle, object->obmat); + float4x4 &transform = component.instance_transforms().last(); + sub_v3_v3(transform.values[3], collection.instance_offset); + } + FOREACH_COLLECTION_OBJECT_RECURSIVE_END; + component.ensure_geometry_instances(); + new_references.add_new(std::move(collection_geometry_set)); + break; + } + } + } + references_ = std::move(new_references); +} diff --git a/source/blender/blenkernel/intern/gpencil.c b/source/blender/blenkernel/intern/gpencil.c index 82a44afbbb1..ed84694a919 100644 --- a/source/blender/blenkernel/intern/gpencil.c +++ b/source/blender/blenkernel/intern/gpencil.c @@ -319,7 +319,7 @@ IDTypeInfo IDType_ID_GD = { .name = "GPencil", .name_plural = "grease_pencils", .translation_context = BLT_I18NCONTEXT_ID_GPENCIL, - .flags = 0, + .flags = IDTYPE_FLAGS_APPEND_IS_REUSABLE, .init_data = NULL, .copy_data = greasepencil_copy_data, diff --git a/source/blender/blenkernel/intern/gpencil_curve.c b/source/blender/blenkernel/intern/gpencil_curve.c index 0752424df71..3819c0699f4 100644 --- a/source/blender/blenkernel/intern/gpencil_curve.c +++ b/source/blender/blenkernel/intern/gpencil_curve.c @@ -543,7 +543,7 @@ void BKE_gpencil_convert_curve(Main *bmain, int actcol = ob_gp->actcol; for (int slot = 1; slot <= ob_gp->totcol; slot++) { - while (slot <= ob_gp->totcol && !BKE_object_material_slot_used(ob_gp->data, slot)) { + while (slot <= ob_gp->totcol && !BKE_object_material_slot_used(ob_gp, slot)) { ob_gp->actcol = slot; BKE_object_material_slot_remove(bmain, ob_gp); diff --git a/source/blender/blenkernel/intern/gpencil_geom.cc b/source/blender/blenkernel/intern/gpencil_geom.cc index 
976b26a1f3a..debdf44b0bb 100644 --- a/source/blender/blenkernel/intern/gpencil_geom.cc +++ b/source/blender/blenkernel/intern/gpencil_geom.cc @@ -738,8 +738,8 @@ bool BKE_gpencil_stroke_stretch(bGPDstroke *gps, sub_v3_v3v3(vec1, &gps->points[start_i].x, &gps->points[start_i + dir_i].x); /* In general curvature = 1/radius. For the case without the - * weights introduced by #segment_influence, the calculation is - * curvature = delta angle/delta arclength = len_v3(total_angle) / overshoot_length */ + * weights introduced by #segment_influence, the calculation is: + * `curvature = delta angle/delta arclength = len_v3(total_angle) / overshoot_length` */ float curvature = normalize_v3(total_angle) / overshoot_length; /* Compensate for the weights powf(added_len, segment_influence). */ curvature /= powf(overshoot_length / fminf(overshoot_parameter, (float)j), segment_influence); diff --git a/source/blender/blenkernel/intern/gpencil_modifier.c b/source/blender/blenkernel/intern/gpencil_modifier.c index 6be03bffb3c..a6164340477 100644 --- a/source/blender/blenkernel/intern/gpencil_modifier.c +++ b/source/blender/blenkernel/intern/gpencil_modifier.c @@ -65,7 +65,7 @@ static CLG_LogRef LOG = {"bke.gpencil_modifier"}; static GpencilModifierTypeInfo *modifier_gpencil_types[NUM_GREASEPENCIL_MODIFIER_TYPES] = {NULL}; #if 0 -/* Note that GPencil actually does not support these atm, but might do in the future. */ +/* Note that GPencil actually does not support these at the moment, but might do in the future. */ static GpencilVirtualModifierData virtualModifierCommonData; #endif @@ -129,7 +129,8 @@ GpencilModifierData *BKE_gpencil_modifiers_get_virtual_modifierlist( GpencilModifierData *md = ob->greasepencil_modifiers.first; #if 0 - /* Note that GPencil actually does not support these atm, but might do in the future. */ + /* Note that GPencil actually does not support these at the moment, + * but might do in the future. 
*/ *virtualModifierData = virtualModifierCommonData; if (ob->parent) { if (ob->parent->type == OB_ARMATURE && ob->partype == PARSKEL) { @@ -328,8 +329,9 @@ void BKE_gpencil_modifier_init(void) gpencil_modifier_type_init(modifier_gpencil_types); /* MOD_gpencil_util.c */ #if 0 - /* Note that GPencil actually does not support these atm, but might do in the future. */ - /* Initialize global cmmon storage used for virtual modifier list */ + /* Note that GPencil actually does not support these at the moment, + * but might do in the future. */ + /* Initialize global common storage used for virtual modifier list. */ GpencilModifierData *md; md = BKE_gpencil_modifier_new(eGpencilModifierType_Armature); virtualModifierCommonData.amd = *((ArmatureGpencilModifierData *)md); @@ -518,7 +520,7 @@ static void gpencil_modifier_copy_data_id_us_cb(void *UNUSED(userData), * Copy grease pencil modifier data. * \param md: Source modifier data * \param target: Target modifier data - * \parm flag: Flags + * \param flag: Flags */ void BKE_gpencil_modifier_copydata_ex(GpencilModifierData *md, GpencilModifierData *target, diff --git a/source/blender/blenkernel/intern/ipo.c b/source/blender/blenkernel/intern/ipo.c index 9b72a2d1a72..26a1240080f 100644 --- a/source/blender/blenkernel/intern/ipo.c +++ b/source/blender/blenkernel/intern/ipo.c @@ -2013,7 +2013,8 @@ static void nlastrips_to_animdata(ID *id, ListBase *strips) } } - /* try to add this strip to the current NLA-Track (i.e. the 'last' one on the stack atm) */ + /* Try to add this strip to the current NLA-Track + * (i.e. the 'last' one on the stack at the moment). */ if (BKE_nlatrack_add_strip(nlt, strip, false) == 0) { /* trying to add to the current failed (no space), * so add a new track to the stack, and add to that... 
diff --git a/source/blender/blenkernel/intern/key.c b/source/blender/blenkernel/intern/key.c index 44fc86877a7..c09fcf0715e 100644 --- a/source/blender/blenkernel/intern/key.c +++ b/source/blender/blenkernel/intern/key.c @@ -1904,7 +1904,7 @@ KeyBlock *BKE_keyblock_add_ctime(Key *key, const char *name, const bool do_force return kb; } -/* only the active keyblock */ +/* Only the active key-block. */ KeyBlock *BKE_keyblock_from_object(Object *ob) { Key *key = BKE_key_from_object(ob); @@ -2247,7 +2247,7 @@ void BKE_keyblock_convert_to_mesh(KeyBlock *kb, Mesh *me) * Computes normals (vertices, polygons and/or loops ones) of given mesh for given shape key. * * \param kb: the KeyBlock to use to compute normals. - * \param mesh: the Mesh to apply keyblock to. + * \param mesh: the Mesh to apply key-block to. * \param r_vertnors: if non-NULL, an array of vectors, same length as number of vertices. * \param r_polynors: if non-NULL, an array of vectors, same length as number of polygons. * \param r_loopnors: if non-NULL, an array of vectors, same length as number of loops. @@ -2345,7 +2345,7 @@ void BKE_keyblock_update_from_vertcos(Object *ob, KeyBlock *kb, const float (*ve return; } - /* Copy coords to keyblock */ + /* Copy coords to key-block. */ if (ELEM(ob->type, OB_MESH, OB_LATTICE)) { for (a = 0; a < tot; a++, fp += 3, co++) { copy_v3_v3(fp, *co); @@ -2405,7 +2405,7 @@ void BKE_keyblock_convert_from_vertcos(Object *ob, KeyBlock *kb, const float (*v kb->data = MEM_mallocN(tot * elemsize, __func__); - /* Copy coords to keyblock */ + /* Copy coords to key-block. */ BKE_keyblock_update_from_vertcos(ob, kb, vertCos); } @@ -2594,7 +2594,7 @@ bool BKE_keyblock_move(Object *ob, int org_index, int new_index) } /** - * Check if given keyblock (as index) is used as basis by others in given key. + * Check if given key-block (as index) is used as basis by others in given key. 
*/ bool BKE_keyblock_is_basis(Key *key, const int index) { diff --git a/source/blender/blenkernel/intern/lib_id.c b/source/blender/blenkernel/intern/lib_id.c index 18824e73ee5..3b2d2c5d2c3 100644 --- a/source/blender/blenkernel/intern/lib_id.c +++ b/source/blender/blenkernel/intern/lib_id.c @@ -674,7 +674,10 @@ ID *BKE_id_copy(Main *bmain, const ID *id) * Invokes the appropriate copy method for the block and returns the result in * newid, unless test. Returns true if the block can be copied. */ -ID *BKE_id_copy_for_duplicate(Main *bmain, ID *id, const eDupli_ID_Flags duplicate_flags) +ID *BKE_id_copy_for_duplicate(Main *bmain, + ID *id, + const eDupli_ID_Flags duplicate_flags, + const int copy_flags) { if (id == NULL) { return id; @@ -685,7 +688,7 @@ ID *BKE_id_copy_for_duplicate(Main *bmain, ID *id, const eDupli_ID_Flags duplica return id; } - ID *id_new = BKE_id_copy(bmain, id); + ID *id_new = BKE_id_copy_ex(bmain, id, NULL, copy_flags); /* Copying add one user by default, need to get rid of that one. */ id_us_min(id_new); ID_NEW_SET(id, id_new); diff --git a/source/blender/blenkernel/intern/lib_override.c b/source/blender/blenkernel/intern/lib_override.c index c60a9104144..68675e5fc91 100644 --- a/source/blender/blenkernel/intern/lib_override.c +++ b/source/blender/blenkernel/intern/lib_override.c @@ -1000,9 +1000,92 @@ bool BKE_lib_override_library_proxy_convert(Main *bmain, DEG_id_tag_update(&ob_proxy->id, ID_RECALC_COPY_ON_WRITE); + /* In case of proxy conversion, remap all local ID usages to linked IDs to their newly created + * overrides. + * While this might not be 100% the desired behavior, it is likely to be the case most of the + * time. Ref: T91711. 
*/ + ID *id_iter; + FOREACH_MAIN_ID_BEGIN (bmain, id_iter) { + if (!ID_IS_LINKED(id_iter)) { + id_iter->tag |= LIB_TAG_DOIT; + } + } + FOREACH_MAIN_ID_END; + return BKE_lib_override_library_create(bmain, scene, view_layer, id_root, id_reference, NULL); } +static void lib_override_library_proxy_convert_do(Main *bmain, + Scene *scene, + Object *ob_proxy, + BlendFileReadReport *reports) +{ + Object *ob_proxy_group = ob_proxy->proxy_group; + const bool is_override_instancing_object = ob_proxy_group != NULL; + + const bool success = BKE_lib_override_library_proxy_convert(bmain, scene, NULL, ob_proxy); + + if (success) { + CLOG_INFO(&LOG, + 4, + "Proxy object '%s' successfuly converted to library overrides", + ob_proxy->id.name); + /* Remove the instance empty from this scene, the items now have an overridden collection + * instead. */ + if (is_override_instancing_object) { + BKE_scene_collections_object_remove(bmain, scene, ob_proxy_group, true); + } + reports->count.proxies_to_lib_overrides_success++; + } +} + +/** + * Convert all proxy objects into library overrides. + * + * \note Only affects local proxies, linked ones are not affected. + * + * \param view_layer: the active view layer to search instantiated collections in, can be NULL (in + * which case \a scene's master collection children hierarchy is used instead). 
+ */ +void BKE_lib_override_library_main_proxy_convert(Main *bmain, BlendFileReadReport *reports) +{ + LISTBASE_FOREACH (Scene *, scene, &bmain->scenes) { + FOREACH_SCENE_OBJECT_BEGIN (scene, object) { + if (object->proxy_group == NULL) { + continue; + } + + lib_override_library_proxy_convert_do(bmain, scene, object, reports); + } + FOREACH_SCENE_OBJECT_END; + + FOREACH_SCENE_OBJECT_BEGIN (scene, object) { + if (object->proxy == NULL) { + continue; + } + + lib_override_library_proxy_convert_do(bmain, scene, object, reports); + } + FOREACH_SCENE_OBJECT_END; + } + + LISTBASE_FOREACH (Object *, object, &bmain->objects) { + if (ID_IS_LINKED(object)) { + if (object->proxy != NULL) { + CLOG_WARN(&LOG, "Did not try to convert linked proxy object '%s'", object->id.name); + reports->count.linked_proxies++; + } + continue; + } + + if (object->proxy_group != NULL || object->proxy != NULL) { + CLOG_WARN( + &LOG, "Proxy object '%s' failed to be converted to library override", object->id.name); + reports->count.proxies_to_lib_overrides_failures++; + } + } +} + /** * Advanced 'smart' function to resync, re-create fully functional overrides up-to-date with linked * data, from an existing override hierarchy. @@ -2889,6 +2972,31 @@ void BKE_lib_override_library_main_update(Main *bmain) G_MAIN = orig_gmain; } +/** In case an ID is used by another liboverride ID, user may not be allowed to delete it. */ +bool BKE_lib_override_library_id_is_user_deletable(struct Main *bmain, struct ID *id) +{ + if (!(ID_IS_LINKED(id) || ID_IS_OVERRIDE_LIBRARY(id))) { + return true; + } + + /* The only strong known case currently are objects used by override collections. */ + /* TODO: There are most likely other cases... This may need to be addressed in a better way at + * some point. 
*/ + if (GS(id->name) != ID_OB) { + return true; + } + Object *ob = (Object *)id; + LISTBASE_FOREACH (Collection *, collection, &bmain->collections) { + if (!ID_IS_OVERRIDE_LIBRARY(collection)) { + continue; + } + if (BKE_collection_has_object(collection, ob)) { + return false; + } + } + return true; +} + /** * Storage (how to store overriding data into `.blend` files). * diff --git a/source/blender/blenkernel/intern/lib_remap.c b/source/blender/blenkernel/intern/lib_remap.c index 250b8d4d515..48396c5e6d9 100644 --- a/source/blender/blenkernel/intern/lib_remap.c +++ b/source/blender/blenkernel/intern/lib_remap.c @@ -345,7 +345,7 @@ static void libblock_remap_data_postprocess_obdata_relink(Main *bmain, Object *o static void libblock_remap_data_postprocess_nodetree_update(Main *bmain, ID *new_id) { /* Update all group nodes using a node group. */ - ntreeUpdateAllUsers(bmain, new_id); + ntreeUpdateAllUsers(bmain, new_id, 0); } /** diff --git a/source/blender/blenkernel/intern/main.c b/source/blender/blenkernel/intern/main.c index 26dcadcc77b..9c3291edbcc 100644 --- a/source/blender/blenkernel/intern/main.c +++ b/source/blender/blenkernel/intern/main.c @@ -485,6 +485,7 @@ void BKE_main_library_weak_reference_add_item(GHash *library_weak_reference_mapp const bool already_exist_in_mapping = BLI_ghash_ensure_p( library_weak_reference_mapping, key, &id_p); BLI_assert(!already_exist_in_mapping); + UNUSED_VARS_NDEBUG(already_exist_in_mapping); BLI_strncpy(new_id->library_weak_reference->library_filepath, library_filepath, diff --git a/source/blender/blenkernel/intern/material.c b/source/blender/blenkernel/intern/material.c index 5f53d5e1ae8..fa3fbd457d1 100644 --- a/source/blender/blenkernel/intern/material.c +++ b/source/blender/blenkernel/intern/material.c @@ -46,6 +46,7 @@ #include "DNA_meta_types.h" #include "DNA_node_types.h" #include "DNA_object_types.h" +#include "DNA_particle_types.h" #include "DNA_pointcloud_types.h" #include "DNA_scene_types.h" #include 
"DNA_volume_types.h" @@ -73,6 +74,7 @@ #include "BKE_material.h" #include "BKE_mesh.h" #include "BKE_node.h" +#include "BKE_object.h" #include "BKE_scene.h" #include "DEG_depsgraph.h" @@ -462,21 +464,33 @@ static void material_data_index_remove_id(ID *id, short index) } } -bool BKE_object_material_slot_used(ID *id, short actcol) +bool BKE_object_material_slot_used(Object *object, short actcol) { - /* ensure we don't try get materials from non-obdata */ - BLI_assert(OB_DATA_SUPPORT_ID(GS(id->name))); + if (!BKE_object_supports_material_slots(object)) { + return false; + } - switch (GS(id->name)) { + LISTBASE_FOREACH (ParticleSystem *, psys, &object->particlesystem) { + if (psys->part->omat == actcol) { + return true; + } + } + + ID *ob_data = object->data; + if (ob_data == NULL || !OB_DATA_SUPPORT_ID(GS(ob_data->name))) { + return false; + } + + switch (GS(ob_data->name)) { case ID_ME: - return BKE_mesh_material_index_used((Mesh *)id, actcol - 1); + return BKE_mesh_material_index_used((Mesh *)ob_data, actcol - 1); case ID_CU: - return BKE_curve_material_index_used((Curve *)id, actcol - 1); + return BKE_curve_material_index_used((Curve *)ob_data, actcol - 1); case ID_MB: - /* meta-elems don't have materials atm */ + /* Meta-elements don't support materials at the moment. 
*/ return false; case ID_GD: - return BKE_gpencil_material_index_used((bGPdata *)id, actcol - 1); + return BKE_gpencil_material_index_used((bGPdata *)ob_data, actcol - 1); default: return false; } diff --git a/source/blender/blenkernel/intern/mball_tessellate.c b/source/blender/blenkernel/intern/mball_tessellate.c index 9dd583b4c6b..a2590171abd 100644 --- a/source/blender/blenkernel/intern/mball_tessellate.c +++ b/source/blender/blenkernel/intern/mball_tessellate.c @@ -454,7 +454,7 @@ static void make_face(PROCESS *process, int i1, int i2, int i3, int i4) cur = process->indices[process->curindex++]; - /* displists now support array drawing, we treat tri's as fake quad */ + /* #DispList supports array drawing, treat tri's as fake quad. */ cur[0] = i1; cur[1] = i2; diff --git a/source/blender/blenkernel/intern/mesh_convert.cc b/source/blender/blenkernel/intern/mesh_convert.cc index 467f7d4543e..59cdb6a2b27 100644 --- a/source/blender/blenkernel/intern/mesh_convert.cc +++ b/source/blender/blenkernel/intern/mesh_convert.cc @@ -41,6 +41,7 @@ #include "BKE_deform.h" #include "BKE_displist.h" #include "BKE_editmesh.h" +#include "BKE_geometry_set.hh" #include "BKE_key.h" #include "BKE_lib_id.h" #include "BKE_lib_query.h" @@ -51,6 +52,7 @@ #include "BKE_mesh_runtime.h" #include "BKE_mesh_wrapper.h" #include "BKE_modifier.h" +#include "BKE_spline.hh" /* these 2 are only used by conversion functions */ #include "BKE_curve.h" /* -- */ @@ -58,6 +60,8 @@ /* -- */ #include "BKE_pointcloud.h" +#include "BKE_curve_to_mesh.hh" + #include "DEG_depsgraph.h" #include "DEG_depsgraph_query.h" @@ -237,7 +241,7 @@ static int mesh_nurbs_displist_to_mdata(const Curve *cu, int a, b, ofs, vertcount, startvert, totvert = 0, totedge = 0, totloop = 0, totpoly = 0; int p1, p2, p3, p4, *index; const bool conv_polys = ( - /* 2d polys are filled with DL_INDEX3 displists */ + /* 2D polys are filled with #DispList.type == #DL_INDEX3. 
*/ (CU_DO_2DFILL(cu) == false) || /* surf polys are never filled */ BKE_curve_type_get(cu) == OB_SURF); @@ -573,90 +577,6 @@ Mesh *BKE_mesh_new_nomain_from_curve(const Object *ob) return BKE_mesh_new_nomain_from_curve_displist(ob, &disp); } -static void mesh_from_nurbs_displist(Object *ob, ListBase *dispbase, const char *obdata_name) -{ - if (ob->runtime.data_eval && GS(((ID *)ob->runtime.data_eval)->name) != ID_ME) { - return; - } - - Mesh *me_eval = (Mesh *)ob->runtime.data_eval; - Mesh *me; - MVert *allvert = nullptr; - MEdge *alledge = nullptr; - MLoop *allloop = nullptr; - MLoopUV *alluv = nullptr; - MPoly *allpoly = nullptr; - int totvert, totedge, totloop, totpoly; - - Curve *cu = (Curve *)ob->data; - - if (me_eval == nullptr) { - if (mesh_nurbs_displist_to_mdata(cu, - dispbase, - &allvert, - &totvert, - &alledge, - &totedge, - &allloop, - &allpoly, - &alluv, - &totloop, - &totpoly) != 0) { - /* Error initializing */ - return; - } - - /* make mesh */ - me = (Mesh *)BKE_id_new_nomain(ID_ME, obdata_name); - - me->totvert = totvert; - me->totedge = totedge; - me->totloop = totloop; - me->totpoly = totpoly; - - me->mvert = (MVert *)CustomData_add_layer( - &me->vdata, CD_MVERT, CD_ASSIGN, allvert, me->totvert); - me->medge = (MEdge *)CustomData_add_layer( - &me->edata, CD_MEDGE, CD_ASSIGN, alledge, me->totedge); - me->mloop = (MLoop *)CustomData_add_layer( - &me->ldata, CD_MLOOP, CD_ASSIGN, allloop, me->totloop); - me->mpoly = (MPoly *)CustomData_add_layer( - &me->pdata, CD_MPOLY, CD_ASSIGN, allpoly, me->totpoly); - - if (alluv) { - const char *uvname = "UVMap"; - me->mloopuv = (MLoopUV *)CustomData_add_layer_named( - &me->ldata, CD_MLOOPUV, CD_ASSIGN, alluv, me->totloop, uvname); - } - - BKE_mesh_calc_normals(me); - } - else { - me = (Mesh *)BKE_id_new_nomain(ID_ME, obdata_name); - - ob->runtime.data_eval = nullptr; - BKE_mesh_nomain_to_mesh(me_eval, me, ob, &CD_MASK_MESH, true); - } - - me->totcol = cu->totcol; - me->mat = cu->mat; - - 
mesh_copy_texture_space_from_curve_type(cu, me); - - cu->mat = nullptr; - cu->totcol = 0; - - /* Do not decrement ob->data usercount here, - * it's done at end of func with BKE_id_free_us() call. */ - ob->data = me; - ob->type = OB_MESH; - - /* For temporary objects in BKE_mesh_new_from_object don't remap - * the entire scene with associated depsgraph updates, which are - * problematic for renderers exporting data. */ - BKE_id_free(nullptr, cu); -} - struct EdgeLink { struct EdgeLink *next, *prev; void *edge; @@ -948,54 +868,32 @@ void BKE_pointcloud_to_mesh(Main *bmain, Depsgraph *depsgraph, Scene *UNUSED(sce BKE_object_free_derived_caches(ob); } -/* Create a temporary object to be used for nurbs-to-mesh conversion. - * - * This is more complex that it should be because #mesh_from_nurbs_displist will do more than - * simply conversion and will attempt to take over ownership of evaluated result and will also - * modify the input object. */ -static Object *object_for_curve_to_mesh_create(Object *object) +/* Create a temporary object to be used for nurbs-to-mesh conversion. */ +static Object *object_for_curve_to_mesh_create(const Object *object) { - Curve *curve = (Curve *)object->data; + const Curve *curve = (const Curve *)object->data; - /* Create object itself. */ + /* Create a temporary object which can be evaluated and modified by generic + * curve evaluation (hence the #LIB_ID_COPY_SET_COPIED_ON_WRITE flag). */ Object *temp_object = (Object *)BKE_id_copy_ex( - nullptr, &object->id, nullptr, LIB_ID_COPY_LOCALIZE); + nullptr, &object->id, nullptr, LIB_ID_COPY_LOCALIZE | LIB_ID_COPY_SET_COPIED_ON_WRITE); /* Remove all modifiers, since we don't want them to be applied. */ BKE_object_free_modifiers(temp_object, LIB_ID_CREATE_NO_USER_REFCOUNT); - /* Copy relevant evaluated fields of curve cache. - * - * Note that there are extra fields in there like bevel and path, but those are not needed during - * conversion, so they are not copied to save unnecessary allocations. 
*/ - if (temp_object->runtime.curve_cache == nullptr) { - temp_object->runtime.curve_cache = (CurveCache *)MEM_callocN(sizeof(CurveCache), - "CurveCache for curve types"); - } - - if (object->runtime.curve_cache != nullptr) { - BKE_displist_copy(&temp_object->runtime.curve_cache->disp, &object->runtime.curve_cache->disp); - } - - /* Constructive modifiers will use mesh to store result. */ - if (object->runtime.data_eval != nullptr) { - BKE_id_copy_ex( - nullptr, object->runtime.data_eval, &temp_object->runtime.data_eval, LIB_ID_COPY_LOCALIZE); - } - - /* Need to create copy of curve itself as well, it will be freed by underlying conversion - * functions. - * - * NOTE: Copies the data, but not the shapekeys. */ - BKE_id_copy_ex( - nullptr, (const ID *)object->data, (ID **)&temp_object->data, LIB_ID_COPY_LOCALIZE); + /* Need to create copy of curve itself as well, since it will be changed by the curve evaluation + * process. NOTE: Copies the data, but not the shape-keys. */ + temp_object->data = BKE_id_copy_ex(nullptr, + (const ID *)object->data, + nullptr, + LIB_ID_COPY_LOCALIZE | LIB_ID_COPY_SET_COPIED_ON_WRITE); Curve *temp_curve = (Curve *)temp_object->data; /* Make sure texture space is calculated for a copy of curve, it will be used for the final * result. */ BKE_curve_texspace_calc(temp_curve); - /* Temporarily set edit so we get updates from edit mode, but also because for text datablocks + /* Temporarily set edit so we get updates from edit mode, but also because for text data-blocks * copying it while in edit mode gives invalid data structures. */ temp_curve->editfont = curve->editfont; temp_curve->editnurb = curve->editnurb; @@ -1006,23 +904,10 @@ static Object *object_for_curve_to_mesh_create(Object *object) /** * Populate `object->runtime.curve_cache` which is then used to create the mesh. 
*/ -static void curve_to_mesh_eval_ensure(Object *object) +static void curve_to_mesh_eval_ensure(Object &object) { - Curve *curve = (Curve *)object->data; - Curve remapped_curve = *curve; - Object remapped_object = *object; - BKE_object_runtime_reset(&remapped_object); - - remapped_object.data = &remapped_curve; - - if (object->runtime.curve_cache == nullptr) { - object->runtime.curve_cache = (CurveCache *)MEM_callocN(sizeof(CurveCache), - "CurveCache for Curve"); - } - - /* Temporarily share the curve-cache with the temporary object, owned by `object`. */ - remapped_object.runtime.curve_cache = object->runtime.curve_cache; - + BLI_assert(GS(static_cast<ID *>(object.data)->name) == ID_CU); + Curve &curve = *static_cast<Curve *>(object.data); /* Clear all modifiers for the bevel object. * * This is because they can not be reliably evaluated for an original object (at least because @@ -1031,83 +916,97 @@ static void curve_to_mesh_eval_ensure(Object *object) * So we create temporary copy of the object which will use same data as the original bevel, but * will have no modifiers. */ Object bevel_object = {{nullptr}}; - if (remapped_curve.bevobj != nullptr) { - bevel_object = *remapped_curve.bevobj; + if (curve.bevobj != nullptr) { + bevel_object = *curve.bevobj; BLI_listbase_clear(&bevel_object.modifiers); BKE_object_runtime_reset(&bevel_object); - remapped_curve.bevobj = &bevel_object; + curve.bevobj = &bevel_object; } /* Same thing for taper. */ Object taper_object = {{nullptr}}; - if (remapped_curve.taperobj != nullptr) { - taper_object = *remapped_curve.taperobj; + if (curve.taperobj != nullptr) { + taper_object = *curve.taperobj; BLI_listbase_clear(&taper_object.modifiers); BKE_object_runtime_reset(&taper_object); - remapped_curve.taperobj = &taper_object; + curve.taperobj = &taper_object; } /* NOTE: We don't have dependency graph or scene here, so we pass nullptr. 
This is all fine since * they are only used for modifier stack, which we have explicitly disabled for all objects. * * TODO(sergey): This is a very fragile logic, but proper solution requires re-writing quite a - * bit of internal functions (#mesh_from_nurbs_displist, BKE_mesh_nomain_to_mesh) and also - * Mesh From Curve operator. + * bit of internal functions (#BKE_mesh_nomain_to_mesh) and also Mesh From Curve operator. * Brecht says hold off with that. */ - Mesh *mesh_eval = nullptr; - BKE_displist_make_curveTypes_forRender( - nullptr, nullptr, &remapped_object, &remapped_object.runtime.curve_cache->disp, &mesh_eval); + BKE_displist_make_curveTypes(nullptr, nullptr, &object, true); - /* NOTE: this is to be consistent with `BKE_displist_make_curveTypes()`, however that is not a - * real issue currently, code here is broken in more than one way, fix(es) will be done - * separately. */ - if (mesh_eval != nullptr) { - BKE_object_eval_assign_data(&remapped_object, &mesh_eval->id, true); - } - - /* Owned by `object` & needed by the caller to create the mesh. */ - remapped_object.runtime.curve_cache = nullptr; - - BKE_object_runtime_free_data(&remapped_object); - BKE_object_runtime_free_data(&taper_object); + BKE_object_runtime_free_data(&bevel_object); BKE_object_runtime_free_data(&taper_object); } -static Mesh *mesh_new_from_curve_type_object(Object *object) +/* Necessary because #BKE_object_get_evaluated_mesh doesn't look in the geometry set yet. 
*/ +static const Mesh *get_evaluated_mesh_from_object(const Object *object) { - Curve *curve = (Curve *)object->data; - Object *temp_object = object_for_curve_to_mesh_create(object); - Curve *temp_curve = (Curve *)temp_object->data; + const Mesh *mesh = BKE_object_get_evaluated_mesh(object); + if (mesh) { + return mesh; + } + GeometrySet *geometry_set_eval = object->runtime.geometry_set_eval; + if (geometry_set_eval) { + return geometry_set_eval->get_mesh_for_read(); + } + return nullptr; +} - /* When input object is an original one, we don't have evaluated curve cache yet, so need to - * create it in the temporary object. */ - if (!DEG_is_evaluated_object(object)) { - curve_to_mesh_eval_ensure(temp_object); +static const CurveEval *get_evaluated_curve_from_object(const Object *object) +{ + GeometrySet *geometry_set_eval = object->runtime.geometry_set_eval; + if (geometry_set_eval) { + return geometry_set_eval->get_curve_for_read(); } + return nullptr; +} - /* Reset pointers before conversion. */ - temp_curve->editfont = nullptr; - temp_curve->editnurb = nullptr; +static Mesh *mesh_new_from_evaluated_curve_type_object(const Object *evaluated_object) +{ + const Mesh *mesh = get_evaluated_mesh_from_object(evaluated_object); + if (mesh) { + return BKE_mesh_copy_for_eval(mesh, false); + } + const CurveEval *curve = get_evaluated_curve_from_object(evaluated_object); + if (curve) { + return blender::bke::curve_to_wire_mesh(*curve); + } + return nullptr; +} - /* Convert to mesh. */ - mesh_from_nurbs_displist( - temp_object, &temp_object->runtime.curve_cache->disp, curve->id.name + 2); +static Mesh *mesh_new_from_curve_type_object(const Object *object) +{ + /* If the object is evaluated, it should either have an evaluated mesh or curve data already. + * The mesh can be duplicated, or the curve converted to wire mesh edges. 
*/ + if (DEG_is_evaluated_object(object)) { + return mesh_new_from_evaluated_curve_type_object(object); + } - /* #mesh_from_nurbs_displist changes the type to a mesh, check it worked. If it didn't - * the curve did not have any segments or otherwise would have generated an empty mesh. */ - if (temp_object->type != OB_MESH) { - BKE_id_free(nullptr, temp_object->data); - BKE_id_free(nullptr, temp_object); - return nullptr; + /* Otherwise, create a temporary "fake" evaluated object and try again. This might have + * different results, since in order to avoid having adverse affects to other original objects, + * modifiers are cleared. An alternative would be to create a temporary depsgraph only for this + * object and its dependencies. */ + Object *temp_object = object_for_curve_to_mesh_create(object); + ID *temp_data = static_cast<ID *>(temp_object->data); + curve_to_mesh_eval_ensure(*temp_object); + + /* If evaluating the curve replaced object data with different data, free the original data. */ + if (temp_data != temp_object->data) { + BKE_id_free(nullptr, temp_data); } - Mesh *mesh_result = (Mesh *)temp_object->data; + Mesh *mesh = mesh_new_from_evaluated_curve_type_object(temp_object); + BKE_id_free(nullptr, temp_object->data); BKE_id_free(nullptr, temp_object); - /* NOTE: Materials are copied in #mesh_from_nurbs_displist(). */ - - return mesh_result; + return mesh; } static Mesh *mesh_new_from_mball_object(Object *object) @@ -1290,7 +1189,7 @@ Mesh *BKE_mesh_new_from_object_to_bmain(Main *bmain, return mesh_in_bmain; } - /* Make sure mesh only points original datablocks, also increase users of materials and other + /* Make sure mesh only points original data-blocks, also increase users of materials and other * possibly referenced data-blocks. 
* * Going to original data-blocks is required to have bmain in a consistent state, where diff --git a/source/blender/blenkernel/intern/modifier.c b/source/blender/blenkernel/intern/modifier.c index b55b02c7bf2..6f6cf12f023 100644 --- a/source/blender/blenkernel/intern/modifier.c +++ b/source/blender/blenkernel/intern/modifier.c @@ -100,7 +100,7 @@ void BKE_modifier_init(void) /* Initialize modifier types */ modifier_type_init(modifier_types); /* MOD_utils.c */ - /* Initialize global cmmon storage used for virtual modifier list */ + /* Initialize global common storage used for virtual modifier list. */ md = BKE_modifier_new(eModifierType_Armature); virtualModifierCommonData.amd = *((ArmatureModifierData *)md); BKE_modifier_free(md); diff --git a/source/blender/blenkernel/intern/nla.c b/source/blender/blenkernel/intern/nla.c index 4ce2ae3c11f..487e925df79 100644 --- a/source/blender/blenkernel/intern/nla.c +++ b/source/blender/blenkernel/intern/nla.c @@ -1484,7 +1484,7 @@ void BKE_nlastrip_recalculate_bounds(NlaStrip *strip) } /* Is the given NLA-strip the first one to occur for the given AnimData block */ -// TODO: make this an api method if necessary, but need to add prefix first +/* TODO: make this an api method if necessary, but need to add prefix first */ static bool nlastrip_is_first(AnimData *adt, NlaStrip *strip) { NlaTrack *nlt; diff --git a/source/blender/blenkernel/intern/node.cc b/source/blender/blenkernel/intern/node.cc index d56a7bf8fb4..73060caa2f8 100644 --- a/source/blender/blenkernel/intern/node.cc +++ b/source/blender/blenkernel/intern/node.cc @@ -52,9 +52,12 @@ #include "BLI_map.hh" #include "BLI_math.h" #include "BLI_path_util.h" +#include "BLI_set.hh" +#include "BLI_stack.hh" #include "BLI_string.h" #include "BLI_string_utils.h" #include "BLI_utildefines.h" +#include "BLI_vector_set.hh" #include "BLT_translation.h" @@ -80,6 +83,7 @@ #include "NOD_function.h" #include "NOD_geometry.h" #include "NOD_node_declaration.hh" +#include 
"NOD_node_tree_ref.hh" #include "NOD_shader.h" #include "NOD_socket.h" #include "NOD_texture.h" @@ -93,6 +97,21 @@ #define NODE_DEFAULT_MAX_WIDTH 700 +using blender::Array; +using blender::MutableSpan; +using blender::Set; +using blender::Span; +using blender::Stack; +using blender::Vector; +using blender::VectorSet; +using blender::nodes::FieldInferencingInterface; +using blender::nodes::InputSocketFieldType; +using blender::nodes::NodeDeclaration; +using blender::nodes::OutputFieldDependency; +using blender::nodes::OutputSocketFieldType; +using blender::nodes::SocketDeclaration; +using namespace blender::nodes::node_tree_ref_types; + /* Fallback types for undefined tree, nodes, sockets */ static bNodeTreeType NodeTreeTypeUndefined; bNodeType NodeTypeUndefined; @@ -110,6 +129,10 @@ static void node_socket_interface_free(bNodeTree *UNUSED(ntree), static void nodeMuteRerouteOutputLinks(struct bNodeTree *ntree, struct bNode *node, const bool mute); +static FieldInferencingInterface *node_field_inferencing_interface_copy( + const FieldInferencingInterface &field_inferencing_interface); +static void node_field_inferencing_interface_free( + const FieldInferencingInterface *field_inferencing_interface); static void ntree_init_data(ID *id) { @@ -220,6 +243,11 @@ static void ntree_copy_data(Main *UNUSED(bmain), ID *id_dst, const ID *id_src, c /* node tree will generate its own interface type */ ntree_dst->interface_type = nullptr; + + if (ntree_src->field_inferencing_interface) { + ntree_dst->field_inferencing_interface = node_field_inferencing_interface_copy( + *ntree_src->field_inferencing_interface); + } } static void ntree_free_data(ID *id) @@ -265,6 +293,8 @@ static void ntree_free_data(ID *id) MEM_freeN(sock); } + node_field_inferencing_interface_free(ntree->field_inferencing_interface); + /* free preview hash */ if (ntree->previews) { BKE_node_instance_hash_free(ntree->previews, (bNodeInstanceValueFP)BKE_node_preview_free); @@ -508,7 +538,7 @@ void 
ntreeBlendWrite(BlendWriter *writer, bNodeTree *ntree) if (node->storage) { /* could be handlerized at some point, now only 1 exception still */ if (ELEM(ntree->type, NTREE_SHADER, NTREE_GEOMETRY) && - ELEM(node->type, SH_NODE_CURVE_VEC, SH_NODE_CURVE_RGB)) { + ELEM(node->type, SH_NODE_CURVE_VEC, SH_NODE_CURVE_RGB, SH_NODE_CURVE_FLOAT)) { BKE_curvemapping_blend_write(writer, (const CurveMapping *)node->storage); } else if ((ntree->type == NTREE_GEOMETRY) && @@ -647,6 +677,8 @@ void ntreeBlendReadData(BlendDataReader *reader, bNodeTree *ntree) ntree->progress = nullptr; ntree->execdata = nullptr; + ntree->field_inferencing_interface = nullptr; + BLO_read_data_address(reader, &ntree->adt); BKE_animdata_blend_read_data(reader, ntree->adt); @@ -682,6 +714,7 @@ void ntreeBlendReadData(BlendDataReader *reader, bNodeTree *ntree) switch (node->type) { case SH_NODE_CURVE_VEC: case SH_NODE_CURVE_RGB: + case SH_NODE_CURVE_FLOAT: case CMP_NODE_TIME: case CMP_NODE_CURVE_VEC: case CMP_NODE_CURVE_RGB: @@ -792,6 +825,11 @@ void ntreeBlendReadData(BlendDataReader *reader, bNodeTree *ntree) /* TODO: should be dealt by new generic cache handling of IDs... */ ntree->previews = nullptr; + if (ntree->type == NTREE_GEOMETRY) { + /* Update field referencing for the geometry nodes modifier. */ + ntree->update |= NTREE_UPDATE_FIELD_INFERENCING; + } + /* type verification is in lib-link */ } @@ -1092,7 +1130,7 @@ static void node_init(const struct bContext *C, bNodeTree *ntree, bNode *node) RNA_pointer_create((ID *)ntree, &RNA_Node, node, &ptr); /* XXX Warning: context can be nullptr in case nodes are added in do_versions. - * Delayed init is not supported for nodes with context-based initfunc_api atm. + * Delayed init is not supported for nodes with context-based `initfunc_api` at the moment. 
*/ BLI_assert(C != nullptr); ntype->initfunc_api(C, &ptr); @@ -4425,7 +4463,510 @@ void ntreeUpdateAllNew(Main *main) FOREACH_NODETREE_END; } -void ntreeUpdateAllUsers(Main *main, ID *id) +static FieldInferencingInterface *node_field_inferencing_interface_copy( + const FieldInferencingInterface &field_inferencing_interface) +{ + return new FieldInferencingInterface(field_inferencing_interface); +} + +static void node_field_inferencing_interface_free( + const FieldInferencingInterface *field_inferencing_interface) +{ + delete field_inferencing_interface; +} + +namespace blender::bke::node_field_inferencing { + +static bool is_field_socket_type(eNodeSocketDatatype type) +{ + return ELEM(type, SOCK_FLOAT, SOCK_INT, SOCK_BOOLEAN, SOCK_VECTOR, SOCK_RGBA); +} + +static bool is_field_socket_type(const SocketRef &socket) +{ + return is_field_socket_type((eNodeSocketDatatype)socket.typeinfo()->type); +} + +static bool update_field_inferencing(bNodeTree &btree); + +static InputSocketFieldType get_interface_input_field_type(const NodeRef &node, + const InputSocketRef &socket) +{ + if (!is_field_socket_type(socket)) { + return InputSocketFieldType::None; + } + if (node.is_reroute_node()) { + return InputSocketFieldType::IsSupported; + } + if (node.is_group_output_node()) { + /* Outputs always support fields when the data type is correct. */ + return InputSocketFieldType::IsSupported; + } + if (node.is_undefined()) { + return InputSocketFieldType::None; + } + + const NodeDeclaration *node_decl = node.declaration(); + + /* Node declarations should be implemented for nodes involved here. */ + BLI_assert(node_decl != nullptr); + + /* Get the field type from the declaration. 
*/ + const SocketDeclaration &socket_decl = *node_decl->inputs()[socket.index()]; + const InputSocketFieldType field_type = socket_decl.input_field_type(); + if (field_type == InputSocketFieldType::Implicit) { + return field_type; + } + if (node_decl->is_function_node()) { + /* In a function node, every socket supports fields. */ + return InputSocketFieldType::IsSupported; + } + return field_type; +} + +static OutputFieldDependency get_interface_output_field_dependency(const NodeRef &node, + const OutputSocketRef &socket) +{ + if (!is_field_socket_type(socket)) { + /* Non-field sockets always output data. */ + return OutputFieldDependency::ForDataSource(); + } + if (node.is_reroute_node()) { + /* The reroute just forwards what is passed in. */ + return OutputFieldDependency::ForDependentField(); + } + if (node.is_group_input_node()) { + /* Input nodes get special treatment in #determine_group_input_states. */ + return OutputFieldDependency::ForDependentField(); + } + if (node.is_undefined()) { + return OutputFieldDependency::ForDataSource(); + } + + const NodeDeclaration *node_decl = node.declaration(); + + /* Node declarations should be implemented for nodes involved here. */ + BLI_assert(node_decl != nullptr); + + if (node_decl->is_function_node()) { + /* In a generic function node, all outputs depend on all inputs. */ + return OutputFieldDependency::ForDependentField(); + } + + /* Use the socket declaration. */ + const SocketDeclaration &socket_decl = *node_decl->outputs()[socket.index()]; + return socket_decl.output_field_dependency(); +} + +/** + * Retrieves information about how the node interacts with fields. + * In the future, this information can be stored in the node declaration. This would allow this + * function to return a reference, making it more efficient. + */ +static FieldInferencingInterface get_node_field_inferencing_interface(const NodeRef &node) +{ + /* Node groups already reference all required information, so just return that. 
*/ + if (node.is_group_node()) { + bNodeTree *group = (bNodeTree *)node.bnode()->id; + if (group == nullptr) { + return FieldInferencingInterface(); + } + if (group->field_inferencing_interface == nullptr) { + /* Update group recursively. */ + update_field_inferencing(*group); + } + return *group->field_inferencing_interface; + } + + FieldInferencingInterface inferencing_interface; + for (const InputSocketRef *input_socket : node.inputs()) { + inferencing_interface.inputs.append(get_interface_input_field_type(node, *input_socket)); + } + + for (const OutputSocketRef *output_socket : node.outputs()) { + inferencing_interface.outputs.append( + get_interface_output_field_dependency(node, *output_socket)); + } + return inferencing_interface; +} + +/** + * This struct contains information for every socket. The values are propagated through the + * network. + */ +struct SocketFieldState { + /* This socket is currently a single value. It could become a field though. */ + bool is_single = true; + /* This socket is required to be a single value. It must not be a field. */ + bool requires_single = false; + /* This socket starts a new field. */ + bool is_field_source = false; +}; + +static Vector<const InputSocketRef *> gather_input_socket_dependencies( + const OutputFieldDependency &field_dependency, const NodeRef &node) +{ + const OutputSocketFieldType type = field_dependency.field_type(); + Vector<const InputSocketRef *> input_sockets; + switch (type) { + case OutputSocketFieldType::FieldSource: + case OutputSocketFieldType::None: { + break; + } + case OutputSocketFieldType::DependentField: { + /* This output depends on all inputs. */ + input_sockets.extend(node.inputs()); + break; + } + case OutputSocketFieldType::PartiallyDependent: { + /* This output depends only on a few inputs. 
*/ + for (const int i : field_dependency.linked_input_indices()) { + input_sockets.append(&node.input(i)); + } + break; + } + } + return input_sockets; +} + +/** + * Check what the group output socket depends on. Potentially traverses the node tree + * to figure out if it is always a field or if it depends on any group inputs. + */ +static OutputFieldDependency find_group_output_dependencies( + const InputSocketRef &group_output_socket, + const Span<SocketFieldState> field_state_by_socket_id) +{ + if (!is_field_socket_type(group_output_socket)) { + return OutputFieldDependency::ForDataSource(); + } + + /* Use a Set here instead of an array indexed by socket id, because we my only need to look at + * very few sockets. */ + Set<const InputSocketRef *> handled_sockets; + Stack<const InputSocketRef *> sockets_to_check; + + handled_sockets.add(&group_output_socket); + sockets_to_check.push(&group_output_socket); + + /* Keeps track of group input indices that are (indirectly) connected to the output. */ + Vector<int> linked_input_indices; + + while (!sockets_to_check.is_empty()) { + const InputSocketRef *input_socket = sockets_to_check.pop(); + + for (const OutputSocketRef *origin_socket : input_socket->logically_linked_sockets()) { + const NodeRef &origin_node = origin_socket->node(); + const SocketFieldState &origin_state = field_state_by_socket_id[origin_socket->id()]; + + if (origin_state.is_field_source) { + if (origin_node.is_group_input_node()) { + /* Found a group input that the group output depends on. */ + linked_input_indices.append_non_duplicates(origin_socket->index()); + } + else { + /* Found a field source that is not the group input. So the output is always a field. 
*/ + return OutputFieldDependency::ForFieldSource(); + } + } + else if (!origin_state.is_single) { + const FieldInferencingInterface inferencing_interface = + get_node_field_inferencing_interface(origin_node); + const OutputFieldDependency &field_dependency = + inferencing_interface.outputs[origin_socket->index()]; + + /* Propagate search further to the left. */ + for (const InputSocketRef *origin_input_socket : + gather_input_socket_dependencies(field_dependency, origin_node)) { + if (!field_state_by_socket_id[origin_input_socket->id()].is_single) { + if (handled_sockets.add(origin_input_socket)) { + sockets_to_check.push(origin_input_socket); + } + } + } + } + } + } + return OutputFieldDependency::ForPartiallyDependentField(std::move(linked_input_indices)); +} + +static void propagate_data_requirements_from_right_to_left( + const NodeTreeRef &tree, const MutableSpan<SocketFieldState> field_state_by_socket_id) +{ + const Vector<const NodeRef *> sorted_nodes = tree.toposort( + NodeTreeRef::ToposortDirection::RightToLeft); + + for (const NodeRef *node : sorted_nodes) { + const FieldInferencingInterface inferencing_interface = get_node_field_inferencing_interface( + *node); + + for (const OutputSocketRef *output_socket : node->outputs()) { + SocketFieldState &state = field_state_by_socket_id[output_socket->id()]; + + const OutputFieldDependency &field_dependency = + inferencing_interface.outputs[output_socket->index()]; + + if (field_dependency.field_type() == OutputSocketFieldType::FieldSource) { + continue; + } + if (field_dependency.field_type() == OutputSocketFieldType::None) { + state.requires_single = true; + continue; + } + + /* The output is required to be a single value when it is connected to any input that does + * not support fields. 
*/ + for (const InputSocketRef *target_socket : output_socket->directly_linked_sockets()) { + state.requires_single |= field_state_by_socket_id[target_socket->id()].requires_single; + } + + if (state.requires_single) { + bool any_input_is_field_implicitly = false; + const Vector<const InputSocketRef *> connected_inputs = gather_input_socket_dependencies( + field_dependency, *node); + for (const InputSocketRef *input_socket : connected_inputs) { + if (inferencing_interface.inputs[input_socket->index()] == + InputSocketFieldType::Implicit) { + if (!input_socket->is_logically_linked()) { + any_input_is_field_implicitly = true; + break; + } + } + } + if (any_input_is_field_implicitly) { + /* This output isn't a single value actually. */ + state.requires_single = false; + } + else { + /* If the output is required to be a single value, the connected inputs in the same node + * must not be fields as well. */ + for (const InputSocketRef *input_socket : connected_inputs) { + field_state_by_socket_id[input_socket->id()].requires_single = true; + } + } + } + } + + /* Some inputs do not require fields independent of what the outputs are connected to. */ + for (const InputSocketRef *input_socket : node->inputs()) { + SocketFieldState &state = field_state_by_socket_id[input_socket->id()]; + if (inferencing_interface.inputs[input_socket->index()] == InputSocketFieldType::None) { + state.requires_single = true; + } + } + } +} + +static void determine_group_input_states( + const NodeTreeRef &tree, + FieldInferencingInterface &new_inferencing_interface, + const MutableSpan<SocketFieldState> field_state_by_socket_id) +{ + { + /* Non-field inputs never support fields. 
*/ + int index; + LISTBASE_FOREACH_INDEX (bNodeSocket *, group_input, &tree.btree()->inputs, index) { + if (!is_field_socket_type((eNodeSocketDatatype)group_input->type)) { + new_inferencing_interface.inputs[index] = InputSocketFieldType::None; + } + } + } + /* Check if group inputs are required to be single values, because they are (indirectly) + * connected to some socket that does not support fields. */ + for (const NodeRef *node : tree.nodes_by_type("NodeGroupInput")) { + for (const OutputSocketRef *output_socket : node->outputs().drop_back(1)) { + SocketFieldState &state = field_state_by_socket_id[output_socket->id()]; + if (state.requires_single) { + new_inferencing_interface.inputs[output_socket->index()] = InputSocketFieldType::None; + } + } + } + /* If an input does not support fields, this should be reflected in all Group Input nodes. */ + for (const NodeRef *node : tree.nodes_by_type("NodeGroupInput")) { + for (const OutputSocketRef *output_socket : node->outputs().drop_back(1)) { + SocketFieldState &state = field_state_by_socket_id[output_socket->id()]; + const bool supports_field = new_inferencing_interface.inputs[output_socket->index()] != + InputSocketFieldType::None; + if (supports_field) { + state.is_single = false; + state.is_field_source = true; + } + else { + state.requires_single = true; + } + } + SocketFieldState &dummy_socket_state = field_state_by_socket_id[node->outputs().last()->id()]; + dummy_socket_state.requires_single = true; + } +} + +static void propagate_field_status_from_left_to_right( + const NodeTreeRef &tree, const MutableSpan<SocketFieldState> field_state_by_socket_id) +{ + Vector<const NodeRef *> sorted_nodes = tree.toposort( + NodeTreeRef::ToposortDirection::LeftToRight); + + for (const NodeRef *node : sorted_nodes) { + if (node->is_group_input_node()) { + continue; + } + + const FieldInferencingInterface inferencing_interface = get_node_field_inferencing_interface( + *node); + + /* Update field state of input sockets, also 
taking into account linked origin sockets. */ + for (const InputSocketRef *input_socket : node->inputs()) { + SocketFieldState &state = field_state_by_socket_id[input_socket->id()]; + if (state.requires_single) { + state.is_single = true; + continue; + } + state.is_single = true; + if (input_socket->logically_linked_sockets().is_empty()) { + if (inferencing_interface.inputs[input_socket->index()] == + InputSocketFieldType::Implicit) { + state.is_single = false; + } + } + else { + for (const OutputSocketRef *origin_socket : input_socket->logically_linked_sockets()) { + if (!field_state_by_socket_id[origin_socket->id()].is_single) { + state.is_single = false; + break; + } + } + } + } + + /* Update field state of output sockets, also taking into account input sockets. */ + for (const OutputSocketRef *output_socket : node->outputs()) { + SocketFieldState &state = field_state_by_socket_id[output_socket->id()]; + const OutputFieldDependency &field_dependency = + inferencing_interface.outputs[output_socket->index()]; + + switch (field_dependency.field_type()) { + case OutputSocketFieldType::None: { + state.is_single = true; + break; + } + case OutputSocketFieldType::FieldSource: { + state.is_single = false; + state.is_field_source = true; + break; + } + case OutputSocketFieldType::PartiallyDependent: + case OutputSocketFieldType::DependentField: { + for (const InputSocketRef *input_socket : + gather_input_socket_dependencies(field_dependency, *node)) { + if (!field_state_by_socket_id[input_socket->id()].is_single) { + state.is_single = false; + break; + } + } + break; + } + } + } + } +} + +static void determine_group_output_states(const NodeTreeRef &tree, + FieldInferencingInterface &new_inferencing_interface, + const Span<SocketFieldState> field_state_by_socket_id) +{ + for (const NodeRef *group_output_node : tree.nodes_by_type("NodeGroupOutput")) { + /* Ignore inactive group output nodes. 
*/ + if (!(group_output_node->bnode()->flag & NODE_DO_OUTPUT)) { + continue; + } + /* Determine dependencies of all group outputs. */ + for (const InputSocketRef *group_output_socket : group_output_node->inputs().drop_back(1)) { + OutputFieldDependency field_dependency = find_group_output_dependencies( + *group_output_socket, field_state_by_socket_id); + new_inferencing_interface.outputs[group_output_socket->index()] = std::move( + field_dependency); + } + break; + } +} + +static void update_socket_shapes(const NodeTreeRef &tree, + const Span<SocketFieldState> field_state_by_socket_id) +{ + const eNodeSocketDisplayShape requires_data_shape = SOCK_DISPLAY_SHAPE_CIRCLE; + const eNodeSocketDisplayShape data_but_can_be_field_shape = SOCK_DISPLAY_SHAPE_DIAMOND_DOT; + const eNodeSocketDisplayShape is_field_shape = SOCK_DISPLAY_SHAPE_DIAMOND; + + for (const InputSocketRef *socket : tree.input_sockets()) { + bNodeSocket *bsocket = socket->bsocket(); + const SocketFieldState &state = field_state_by_socket_id[socket->id()]; + if (state.requires_single) { + bsocket->display_shape = requires_data_shape; + } + else if (state.is_single) { + bsocket->display_shape = data_but_can_be_field_shape; + } + else { + bsocket->display_shape = is_field_shape; + } + } + for (const OutputSocketRef *socket : tree.output_sockets()) { + bNodeSocket *bsocket = socket->bsocket(); + const SocketFieldState &state = field_state_by_socket_id[socket->id()]; + if (state.requires_single) { + bsocket->display_shape = requires_data_shape; + } + else if (state.is_single) { + bsocket->display_shape = data_but_can_be_field_shape; + } + else { + bsocket->display_shape = is_field_shape; + } + } +} + +static bool update_field_inferencing(bNodeTree &btree) +{ + using namespace blender::nodes; + if (btree.type != NTREE_GEOMETRY) { + return false; + } + + /* Create new inferencing interface for this node group. 
*/ + FieldInferencingInterface *new_inferencing_interface = new FieldInferencingInterface(); + new_inferencing_interface->inputs.resize(BLI_listbase_count(&btree.inputs), + InputSocketFieldType::IsSupported); + new_inferencing_interface->outputs.resize(BLI_listbase_count(&btree.outputs), + OutputFieldDependency::ForDataSource()); + + /* Create #NodeTreeRef to accelerate various queries on the node tree (e.g. linked sockets). */ + const NodeTreeRef tree{&btree}; + + /* Keep track of the state of all sockets. The index into this array is #SocketRef::id(). */ + Array<SocketFieldState> field_state_by_socket_id(tree.sockets().size()); + + propagate_data_requirements_from_right_to_left(tree, field_state_by_socket_id); + determine_group_input_states(tree, *new_inferencing_interface, field_state_by_socket_id); + propagate_field_status_from_left_to_right(tree, field_state_by_socket_id); + determine_group_output_states(tree, *new_inferencing_interface, field_state_by_socket_id); + update_socket_shapes(tree, field_state_by_socket_id); + + /* Update the previous group interface. */ + const bool group_interface_changed = btree.field_inferencing_interface == nullptr || + *btree.field_inferencing_interface != + *new_inferencing_interface; + delete btree.field_inferencing_interface; + btree.field_inferencing_interface = new_inferencing_interface; + + return group_interface_changed; +} + +} // namespace blender::bke::node_field_inferencing + +/** + * \param tree_update_flag: #eNodeTreeUpdate enum. + */ +void ntreeUpdateAllUsers(Main *main, ID *id, const int tree_update_flag) { if (id == nullptr) { return; @@ -4446,7 +4987,8 @@ void ntreeUpdateAllUsers(Main *main, ID *id) } if (need_update) { - ntreeUpdateTree(nullptr, ntree); + ntree->update |= tree_update_flag; + ntreeUpdateTree(tree_update_flag ? 
main : nullptr, ntree); } } FOREACH_NODETREE_END; @@ -4508,8 +5050,18 @@ void ntreeUpdateTree(Main *bmain, bNodeTree *ntree) ntreeInterfaceTypeUpdate(ntree); } + int tree_user_update_flag = 0; + + if (ntree->update & NTREE_UPDATE) { + /* If the field interface of this node tree has changed, all node trees using + * this group will need to recalculate their interface as well. */ + if (blender::bke::node_field_inferencing::update_field_inferencing(*ntree)) { + tree_user_update_flag |= NTREE_UPDATE_FIELD_INFERENCING; + } + } + if (bmain) { - ntreeUpdateAllUsers(bmain, &ntree->id); + ntreeUpdateAllUsers(bmain, &ntree->id, tree_user_update_flag); } if (ntree->update & (NTREE_UPDATE_LINKS | NTREE_UPDATE_NODES)) { @@ -5023,6 +5575,7 @@ static void registerShaderNodes() register_node_type_sh_shadertorgb(); register_node_type_sh_normal(); register_node_type_sh_mapping(); + register_node_type_sh_curve_float(); register_node_type_sh_curve_vec(); register_node_type_sh_curve_rgb(); register_node_type_sh_map_range(); @@ -5156,10 +5709,17 @@ static void registerGeometryNodes() { register_node_type_geo_group(); + register_node_type_geo_legacy_curve_set_handles(); + register_node_type_geo_legacy_attribute_proximity(); + register_node_type_geo_legacy_attribute_randomize(); register_node_type_geo_legacy_material_assign(); register_node_type_geo_legacy_select_by_material(); + register_node_type_geo_legacy_curve_spline_type(); + register_node_type_geo_legacy_curve_reverse(); + register_node_type_geo_legacy_curve_subdivide(); register_node_type_geo_align_rotation_to_vector(); + register_node_type_geo_attribute_capture(); register_node_type_geo_attribute_clamp(); register_node_type_geo_attribute_color_ramp(); register_node_type_geo_attribute_combine_xyz(); @@ -5167,12 +5727,9 @@ static void registerGeometryNodes() register_node_type_geo_attribute_convert(); register_node_type_geo_attribute_curve_map(); register_node_type_geo_attribute_fill(); - register_node_type_geo_attribute_capture(); 
register_node_type_geo_attribute_map_range(); register_node_type_geo_attribute_math(); register_node_type_geo_attribute_mix(); - register_node_type_geo_attribute_proximity(); - register_node_type_geo_attribute_randomize(); register_node_type_geo_attribute_remove(); register_node_type_geo_attribute_separate_xyz(); register_node_type_geo_attribute_statistic(); @@ -5183,9 +5740,9 @@ static void registerGeometryNodes() register_node_type_geo_bounding_box(); register_node_type_geo_collection_info(); register_node_type_geo_convex_hull(); - register_node_type_geo_curve_sample(); register_node_type_geo_curve_endpoints(); register_node_type_geo_curve_fill(); + register_node_type_geo_curve_fillet(); register_node_type_geo_curve_length(); register_node_type_geo_curve_parameter(); register_node_type_geo_curve_primitive_bezier_segment(); @@ -5197,24 +5754,28 @@ static void registerGeometryNodes() register_node_type_geo_curve_primitive_star(); register_node_type_geo_curve_resample(); register_node_type_geo_curve_reverse(); + register_node_type_geo_curve_sample(); register_node_type_geo_curve_set_handles(); register_node_type_geo_curve_spline_type(); register_node_type_geo_curve_subdivide(); - register_node_type_geo_curve_fillet(); register_node_type_geo_curve_to_mesh(); register_node_type_geo_curve_to_points(); register_node_type_geo_curve_trim(); register_node_type_geo_delete_geometry(); + register_node_type_geo_distribute_points_on_faces(); register_node_type_geo_edge_split(); register_node_type_geo_input_index(); register_node_type_geo_input_material(); register_node_type_geo_input_normal(); register_node_type_geo_input_position(); register_node_type_geo_input_tangent(); + register_node_type_geo_input_spline_length(); + register_node_type_geo_instance_on_points(); register_node_type_geo_is_viewport(); register_node_type_geo_join_geometry(); register_node_type_geo_material_assign(); register_node_type_geo_material_replace(); + register_node_type_geo_material_selection(); 
register_node_type_geo_mesh_primitive_circle(); register_node_type_geo_mesh_primitive_cone(); register_node_type_geo_mesh_primitive_cube(); @@ -5225,6 +5786,7 @@ static void registerGeometryNodes() register_node_type_geo_mesh_primitive_uv_sphere(); register_node_type_geo_mesh_subdivide(); register_node_type_geo_mesh_to_curve(); + register_node_type_geo_mesh_to_points(); register_node_type_geo_object_info(); register_node_type_geo_point_distribute(); register_node_type_geo_point_instance(); @@ -5232,15 +5794,17 @@ static void registerGeometryNodes() register_node_type_geo_point_scale(); register_node_type_geo_point_separate(); register_node_type_geo_point_translate(); + register_node_type_geo_points_to_vertices(); register_node_type_geo_points_to_volume(); + register_node_type_geo_proximity(); register_node_type_geo_raycast(); register_node_type_geo_realize_instances(); register_node_type_geo_sample_texture(); register_node_type_geo_select_by_handle_type(); - register_node_type_geo_string_join(); - register_node_type_geo_material_selection(); register_node_type_geo_separate_components(); register_node_type_geo_set_position(); + register_node_type_geo_string_join(); + register_node_type_geo_string_to_curves(); register_node_type_geo_subdivision_surface(); register_node_type_geo_switch(); register_node_type_geo_transform(); @@ -5251,12 +5815,16 @@ static void registerGeometryNodes() static void registerFunctionNodes() { + register_node_type_fn_legacy_random_float(); + register_node_type_fn_boolean_math(); register_node_type_fn_float_compare(); register_node_type_fn_float_to_int(); + register_node_type_fn_input_special_characters(); register_node_type_fn_input_string(); register_node_type_fn_input_vector(); - register_node_type_fn_random_float(); + register_node_type_fn_random_value(); + register_node_type_fn_rotate_euler(); register_node_type_fn_string_length(); register_node_type_fn_string_substring(); register_node_type_fn_value_to_string(); diff --git 
a/source/blender/blenkernel/intern/object.c b/source/blender/blenkernel/intern/object.c index 465ec9dc665..ec39c5b45c4 100644 --- a/source/blender/blenkernel/intern/object.c +++ b/source/blender/blenkernel/intern/object.c @@ -2634,10 +2634,16 @@ Object *BKE_object_duplicate(Main *bmain, { const bool is_subprocess = (duplicate_options & LIB_ID_DUPLICATE_IS_SUBPROCESS) != 0; const bool is_root_id = (duplicate_options & LIB_ID_DUPLICATE_IS_ROOT_ID) != 0; + int copy_flags = LIB_ID_COPY_DEFAULT; if (!is_subprocess) { BKE_main_id_newptr_and_tag_clear(bmain); } + else { + /* In case copying object is a sub-process of collection (or scene) copying, do not try to + * re-assign RB objects to existing RBW collections. */ + copy_flags |= LIB_ID_COPY_RIGID_BODY_NO_COLLECTION_HANDLING; + } if (is_root_id) { /* In case root duplicated ID is linked, assume we want to get a local copy of it and duplicate * all expected linked data. */ @@ -2649,24 +2655,22 @@ Object *BKE_object_duplicate(Main *bmain, Material ***matarar; - Object *obn = (Object *)BKE_id_copy_for_duplicate(bmain, &ob->id, dupflag); + Object *obn = (Object *)BKE_id_copy_for_duplicate(bmain, &ob->id, dupflag, copy_flags); /* 0 == full linked. 
*/ if (dupflag == 0) { return obn; } - BKE_animdata_duplicate_id_action(bmain, &obn->id, dupflag); - if (dupflag & USER_DUP_MAT) { for (int i = 0; i < obn->totcol; i++) { - BKE_id_copy_for_duplicate(bmain, (ID *)obn->mat[i], dupflag); + BKE_id_copy_for_duplicate(bmain, (ID *)obn->mat[i], dupflag, copy_flags); } } if (dupflag & USER_DUP_PSYS) { ParticleSystem *psys; for (psys = obn->particlesystem.first; psys; psys = psys->next) { - BKE_id_copy_for_duplicate(bmain, (ID *)psys->part, dupflag); + BKE_id_copy_for_duplicate(bmain, (ID *)psys->part, dupflag, copy_flags); } } @@ -2677,77 +2681,77 @@ Object *BKE_object_duplicate(Main *bmain, switch (obn->type) { case OB_MESH: if (dupflag & USER_DUP_MESH) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_CURVE: if (dupflag & USER_DUP_CURVE) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_SURF: if (dupflag & USER_DUP_SURF) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_FONT: if (dupflag & USER_DUP_FONT) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_MBALL: if (dupflag & USER_DUP_MBALL) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_LAMP: if (dupflag & USER_DUP_LAMP) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_ARMATURE: if (dupflag & USER_DUP_ARM) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; 
case OB_LATTICE: if (dupflag != 0) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_CAMERA: if (dupflag != 0) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_LIGHTPROBE: if (dupflag & USER_DUP_LIGHTPROBE) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_SPEAKER: if (dupflag != 0) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_GPENCIL: if (dupflag & USER_DUP_GPENCIL) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_HAIR: if (dupflag & USER_DUP_HAIR) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_POINTCLOUD: if (dupflag & USER_DUP_POINTCLOUD) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; case OB_VOLUME: if (dupflag & USER_DUP_VOLUME) { - id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag); + id_new = BKE_id_copy_for_duplicate(bmain, id_old, dupflag, copy_flags); } break; } @@ -2758,7 +2762,7 @@ Object *BKE_object_duplicate(Main *bmain, matarar = BKE_object_material_array_p(obn); if (matarar) { for (int i = 0; i < obn->totcol; i++) { - BKE_id_copy_for_duplicate(bmain, (ID *)(*matarar)[i], dupflag); + BKE_id_copy_for_duplicate(bmain, (ID *)(*matarar)[i], dupflag, copy_flags); } } } diff --git a/source/blender/blenkernel/intern/particle.c b/source/blender/blenkernel/intern/particle.c index 50b0fb1c9f5..7b2a1af7086 100644 --- 
a/source/blender/blenkernel/intern/particle.c +++ b/source/blender/blenkernel/intern/particle.c @@ -4619,11 +4619,11 @@ void psys_get_particle_on_path(ParticleSimulationData *sim, pind.cache = cached ? psys->pointcache : NULL; pind.epoint = NULL; pind.bspline = (psys->part->flag & PART_HAIR_BSPLINE); - /* pind.dm disabled in editmode means we don't get effectors taken into - * account when subdividing for instance */ + /* `pind.dm` disabled in edit-mode means we don't get effectors taken into + * account when subdividing for instance. */ pind.mesh = psys_in_edit_mode(sim->depsgraph, psys) ? NULL : - psys->hair_out_mesh; /* XXX Sybren EEK */ + psys->hair_out_mesh; /* XXX(@sybren) EEK. */ init_particle_interpolation(sim->ob, psys, pa, &pind); do_particle_interpolation(psys, p, pa, t, &pind, state); diff --git a/source/blender/blenkernel/intern/preferences.c b/source/blender/blenkernel/intern/preferences.c index 8dcf6de164a..0b8e8d7c311 100644 --- a/source/blender/blenkernel/intern/preferences.c +++ b/source/blender/blenkernel/intern/preferences.c @@ -61,6 +61,15 @@ bUserAssetLibrary *BKE_preferences_asset_library_add(UserDef *userdef, return library; } +/** + * Unlink and free a library preference member. + * \note Free's \a library itself. + */ +void BKE_preferences_asset_library_remove(UserDef *userdef, bUserAssetLibrary *library) +{ + BLI_freelinkN(&userdef->asset_libraries, library); +} + void BKE_preferences_asset_library_name_set(UserDef *userdef, bUserAssetLibrary *library, const char *name) @@ -74,15 +83,6 @@ void BKE_preferences_asset_library_name_set(UserDef *userdef, sizeof(library->name)); } -/** - * Unlink and free a library preference member. - * \note Free's \a library itself. 
- */ -void BKE_preferences_asset_library_remove(UserDef *userdef, bUserAssetLibrary *library) -{ - BLI_freelinkN(&userdef->asset_libraries, library); -} - bUserAssetLibrary *BKE_preferences_asset_library_find_from_index(const UserDef *userdef, int index) { return BLI_findlink(&userdef->asset_libraries, index); @@ -94,6 +94,17 @@ bUserAssetLibrary *BKE_preferences_asset_library_find_from_name(const UserDef *u return BLI_findstring(&userdef->asset_libraries, name, offsetof(bUserAssetLibrary, name)); } +bUserAssetLibrary *BKE_preferences_asset_library_containing_path(const UserDef *userdef, + const char *path) +{ + LISTBASE_FOREACH (bUserAssetLibrary *, asset_lib_pref, &userdef->asset_libraries) { + if (BLI_path_contains(asset_lib_pref->path, path)) { + return asset_lib_pref; + } + } + return NULL; +} + int BKE_preferences_asset_library_get_index(const UserDef *userdef, const bUserAssetLibrary *library) { diff --git a/source/blender/blenkernel/intern/rigidbody.c b/source/blender/blenkernel/intern/rigidbody.c index 328c54fc21b..1ea659b2d41 100644 --- a/source/blender/blenkernel/intern/rigidbody.c +++ b/source/blender/blenkernel/intern/rigidbody.c @@ -302,7 +302,7 @@ void BKE_rigidbody_object_copy(Main *bmain, Object *ob_dst, const Object *ob_src ob_dst->rigidbody_object = rigidbody_copy_object(ob_src, flag); ob_dst->rigidbody_constraint = rigidbody_copy_constraint(ob_src, flag); - if (flag & LIB_ID_CREATE_NO_MAIN) { + if ((flag & (LIB_ID_CREATE_NO_MAIN | LIB_ID_COPY_RIGID_BODY_NO_COLLECTION_HANDLING)) != 0) { return; } @@ -1211,8 +1211,8 @@ RigidBodyWorld *BKE_rigidbody_world_copy(RigidBodyWorld *rbw, const int flag) id_us_plus((ID *)rbw_copy->constraints); } - if ((flag & LIB_ID_CREATE_NO_MAIN) == 0) { - /* This is a regular copy, and not a CoW copy for depsgraph evaluation */ + if ((flag & LIB_ID_COPY_SET_COPIED_ON_WRITE) == 0) { + /* This is a regular copy, and not a CoW copy for depsgraph evaluation. 
*/ rbw_copy->shared = MEM_callocN(sizeof(*rbw_copy->shared), "RigidBodyWorld_Shared"); BKE_ptcache_copy_list(&rbw_copy->shared->ptcaches, &rbw->shared->ptcaches, LIB_ID_COPY_CACHES); rbw_copy->shared->pointcache = rbw_copy->shared->ptcaches.first; diff --git a/source/blender/blenkernel/intern/scene.c b/source/blender/blenkernel/intern/scene.c index 6b5c94a2786..a9a8cd93b1d 100644 --- a/source/blender/blenkernel/intern/scene.c +++ b/source/blender/blenkernel/intern/scene.c @@ -63,6 +63,8 @@ #include "BLI_threads.h" #include "BLI_utildefines.h" +#include "BLO_readfile.h" + #include "BLT_translation.h" #include "BKE_action.h" @@ -993,8 +995,13 @@ static void link_recurs_seq(BlendDataReader *reader, ListBase *lb) { BLO_read_list(reader, lb); - LISTBASE_FOREACH (Sequence *, seq, lb) { - if (seq->seqbase.first) { + LISTBASE_FOREACH_MUTABLE (Sequence *, seq, lb) { + /* Sanity check. */ + if (!SEQ_valid_strip_channel(seq)) { + BLI_freelinkN(lb, seq); + BLO_read_data_reports(reader)->count.vse_strips_skipped++; + } + else if (seq->seqbase.first) { link_recurs_seq(reader, &seq->seqbase); } } @@ -1794,6 +1801,7 @@ Scene *BKE_scene_duplicate(Main *bmain, Scene *sce, eSceneCopyMethod type) /* Scene duplication is always root of duplication currently. */ const bool is_subprocess = false; const bool is_root_id = true; + const int copy_flags = LIB_ID_COPY_DEFAULT; if (!is_subprocess) { BKE_main_id_newptr_and_tag_clear(bmain); @@ -1809,21 +1817,40 @@ Scene *BKE_scene_duplicate(Main *bmain, Scene *sce, eSceneCopyMethod type) /* Copy Freestyle LineStyle datablocks. 
*/ LISTBASE_FOREACH (ViewLayer *, view_layer_dst, &sce_copy->view_layers) { LISTBASE_FOREACH (FreestyleLineSet *, lineset, &view_layer_dst->freestyle_config.linesets) { - BKE_id_copy_for_duplicate(bmain, (ID *)lineset->linestyle, duplicate_flags); + BKE_id_copy_for_duplicate(bmain, (ID *)lineset->linestyle, duplicate_flags, copy_flags); } } /* Full copy of world (included animations) */ - BKE_id_copy_for_duplicate(bmain, (ID *)sce->world, duplicate_flags); + BKE_id_copy_for_duplicate(bmain, (ID *)sce->world, duplicate_flags, copy_flags); /* Full copy of GreasePencil. */ - BKE_id_copy_for_duplicate(bmain, (ID *)sce->gpd, duplicate_flags); + BKE_id_copy_for_duplicate(bmain, (ID *)sce->gpd, duplicate_flags, copy_flags); /* Deep-duplicate collections and objects (using preferences' settings for which sub-data to * duplicate along the object itself). */ BKE_collection_duplicate( bmain, NULL, sce_copy->master_collection, duplicate_flags, LIB_ID_DUPLICATE_IS_SUBPROCESS); + /* Rigid body world collections may not be instantiated as scene's collections, ensure they + * also get properly duplicated. */ + if (sce_copy->rigidbody_world != NULL) { + if (sce_copy->rigidbody_world->group != NULL) { + BKE_collection_duplicate(bmain, + NULL, + sce_copy->rigidbody_world->group, + duplicate_flags, + LIB_ID_DUPLICATE_IS_SUBPROCESS); + } + if (sce_copy->rigidbody_world->constraints != NULL) { + BKE_collection_duplicate(bmain, + NULL, + sce_copy->rigidbody_world->constraints, + duplicate_flags, + LIB_ID_DUPLICATE_IS_SUBPROCESS); + } + } + if (!is_subprocess) { /* This code will follow into all ID links using an ID tagged with LIB_TAG_NEW. */ BKE_libblock_relink_to_newid(&sce_copy->id); @@ -2465,7 +2492,7 @@ static void scene_graph_update_tagged(Depsgraph *depsgraph, Main *bmain, bool on // DEG_debug_graph_relations_validate(depsgraph, bmain, scene); /* Flush editing data if needed. 
*/ prepare_mesh_for_viewport_render(bmain, view_layer); - /* Update all objects: drivers, matrices, displists, etc. flags set + /* Update all objects: drivers, matrices, #DispList, etc. flags set * by depsgraph or manual, no layer check here, gets correct flushed. */ DEG_evaluate_on_refresh(depsgraph); /* Update sound system. */ @@ -2541,7 +2568,7 @@ void BKE_scene_graph_update_for_newframe_ex(Depsgraph *depsgraph, const bool cle BKE_image_editors_update_frame(bmain, scene->r.cfra); BKE_sound_set_cfra(scene->r.cfra); DEG_graph_relations_update(depsgraph); - /* Update all objects: drivers, matrices, displists, etc. flags set + /* Update all objects: drivers, matrices, #DispList, etc. flags set * by depgraph or manual, no layer check here, gets correct flushed. * * NOTE: Only update for new frame on first iteration. Second iteration is for ensuring user diff --git a/source/blender/blenkernel/intern/softbody.c b/source/blender/blenkernel/intern/softbody.c index fbc781f5eb9..b7eb9d31b23 100644 --- a/source/blender/blenkernel/intern/softbody.c +++ b/source/blender/blenkernel/intern/softbody.c @@ -2295,7 +2295,7 @@ static void softbody_calc_forces( sb_sfesf_threads_run(depsgraph, scene, ob, timenow, sb->totspring, NULL); } - /* after spring scan because it uses Effoctors too */ + /* After spring scan because it uses effectors too. 
*/ ListBase *effectors = BKE_effectors_create(depsgraph, ob, NULL, sb->effector_weights, false); if (do_deflector) { diff --git a/source/blender/blenkernel/intern/spline_bezier.cc b/source/blender/blenkernel/intern/spline_bezier.cc index b36d7a21669..f719a1cfda2 100644 --- a/source/blender/blenkernel/intern/spline_bezier.cc +++ b/source/blender/blenkernel/intern/spline_bezier.cc @@ -289,6 +289,56 @@ void BezierSpline::transform(const blender::float4x4 &matrix) this->mark_cache_invalid(); } +static void set_handle_position(const float3 &position, + const BezierSpline::HandleType type, + const BezierSpline::HandleType type_other, + const float3 &new_value, + float3 &handle, + float3 &handle_other) +{ + /* Don't bother when the handle positions are calculated automatically anyway. */ + if (ELEM(type, BezierSpline::HandleType::Auto, BezierSpline::HandleType::Vector)) { + return; + } + + handle = new_value; + if (type_other == BezierSpline::HandleType::Align) { + /* Keep track of the old length of the opposite handle. */ + const float length = float3::distance(handle_other, position); + /* Set the other handle to directly opposite from the current handle. */ + const float3 dir = (handle - position).normalized(); + handle_other = position - dir * length; + } +} + +/** + * Set positions for the right handle of the control point, ensuring that + * aligned handles stay aligned. Has no effect for auto and vector type handles. + */ +void BezierSpline::set_handle_position_right(const int index, const blender::float3 &value) +{ + set_handle_position(positions_[index], + handle_types_right_[index], + handle_types_left_[index], + value, + handle_positions_right_[index], + handle_positions_left_[index]); +} + +/** + * Set positions for the left handle of the control point, ensuring that + * aligned handles stay aligned. Has no effect for auto and vector type handles. 
+ */ +void BezierSpline::set_handle_position_left(const int index, const blender::float3 &value) +{ + set_handle_position(positions_[index], + handle_types_left_[index], + handle_types_right_[index], + value, + handle_positions_left_[index], + handle_positions_right_[index]); +} + bool BezierSpline::point_is_sharp(const int index) const { return ELEM(handle_types_left_[index], HandleType::Vector, HandleType::Free) || diff --git a/source/blender/blenkernel/intern/subdiv_mesh.c b/source/blender/blenkernel/intern/subdiv_mesh.c index e9cd0b70019..01bccab1bbd 100644 --- a/source/blender/blenkernel/intern/subdiv_mesh.c +++ b/source/blender/blenkernel/intern/subdiv_mesh.c @@ -50,7 +50,7 @@ typedef struct SubdivMeshContext { const Mesh *coarse_mesh; Subdiv *subdiv; Mesh *subdiv_mesh; - /* Cached custom data arrays for fastter access. */ + /* Cached custom data arrays for faster access. */ int *vert_origindex; int *edge_origindex; int *loop_origindex; diff --git a/source/blender/blenkernel/intern/tracking.c b/source/blender/blenkernel/intern/tracking.c index 068d048fd08..3cdb8e927a6 100644 --- a/source/blender/blenkernel/intern/tracking.c +++ b/source/blender/blenkernel/intern/tracking.c @@ -3014,6 +3014,61 @@ static int channels_average_error_sort(const void *a, const void *b) return 0; } +static int compare_firstlast_putting_undefined_first( + bool inverse, bool a_markerless, int a_value, bool b_markerless, int b_value) +{ + if (a_markerless && b_markerless) { + /* Neither channel has not-disabled markers, return whatever. */ + return 0; + } + if (a_markerless) { + /* Put the markerless channel first. */ + return 0; + } + if (b_markerless) { + /* Put the markerless channel first. */ + return 1; + } + + /* Both channels have markers. 
*/ + + if (inverse) { + if (a_value < b_value) { + return 1; + } + return 0; + } + + if (a_value > b_value) { + return 1; + } + return 0; +} + +static int channels_start_sort(const void *a, const void *b) +{ + const MovieTrackingDopesheetChannel *channel_a = a; + const MovieTrackingDopesheetChannel *channel_b = b; + + return compare_firstlast_putting_undefined_first(false, + channel_a->tot_segment == 0, + channel_a->first_not_disabled_marker_framenr, + channel_b->tot_segment == 0, + channel_b->first_not_disabled_marker_framenr); +} + +static int channels_end_sort(const void *a, const void *b) +{ + const MovieTrackingDopesheetChannel *channel_a = a; + const MovieTrackingDopesheetChannel *channel_b = b; + + return compare_firstlast_putting_undefined_first(false, + channel_a->tot_segment == 0, + channel_a->last_not_disabled_marker_framenr, + channel_b->tot_segment == 0, + channel_b->last_not_disabled_marker_framenr); +} + static int channels_alpha_inverse_sort(const void *a, const void *b) { if (channels_alpha_sort(a, b)) { @@ -3053,22 +3108,51 @@ static int channels_average_error_inverse_sort(const void *a, const void *b) return 0; } +static int channels_start_inverse_sort(const void *a, const void *b) +{ + const MovieTrackingDopesheetChannel *channel_a = a; + const MovieTrackingDopesheetChannel *channel_b = b; + + return compare_firstlast_putting_undefined_first(true, + channel_a->tot_segment == 0, + channel_a->first_not_disabled_marker_framenr, + channel_b->tot_segment == 0, + channel_b->first_not_disabled_marker_framenr); +} + +static int channels_end_inverse_sort(const void *a, const void *b) +{ + const MovieTrackingDopesheetChannel *channel_a = a; + const MovieTrackingDopesheetChannel *channel_b = b; + + return compare_firstlast_putting_undefined_first(true, + channel_a->tot_segment == 0, + channel_a->last_not_disabled_marker_framenr, + channel_b->tot_segment == 0, + channel_b->last_not_disabled_marker_framenr); +} + /* Calculate frames segments at which track 
is tracked continuously. */ static void tracking_dopesheet_channels_segments_calc(MovieTrackingDopesheetChannel *channel) { MovieTrackingTrack *track = channel->track; int i, segment; + bool first_not_disabled_marker_framenr_set; channel->tot_segment = 0; channel->max_segment = 0; channel->total_frames = 0; + channel->first_not_disabled_marker_framenr = 0; + channel->last_not_disabled_marker_framenr = 0; + /* TODO(sergey): looks a bit code-duplicated, need to look into * logic de-duplication here. */ /* count */ i = 0; + first_not_disabled_marker_framenr_set = false; while (i < track->markersnr) { MovieTrackingMarker *marker = &track->markers[i]; @@ -3086,6 +3170,12 @@ static void tracking_dopesheet_channels_segments_calc(MovieTrackingDopesheetChan break; } + if (!first_not_disabled_marker_framenr_set) { + channel->first_not_disabled_marker_framenr = marker->framenr; + first_not_disabled_marker_framenr_set = true; + } + channel->last_not_disabled_marker_framenr = marker->framenr; + prev_fra = marker->framenr; len++; i++; @@ -3203,6 +3293,12 @@ static void tracking_dopesheet_channels_sort(MovieTracking *tracking, else if (sort_method == TRACKING_DOPE_SORT_AVERAGE_ERROR) { BLI_listbase_sort(&dopesheet->channels, channels_average_error_inverse_sort); } + else if (sort_method == TRACKING_DOPE_SORT_START) { + BLI_listbase_sort(&dopesheet->channels, channels_start_inverse_sort); + } + else if (sort_method == TRACKING_DOPE_SORT_END) { + BLI_listbase_sort(&dopesheet->channels, channels_end_inverse_sort); + } } else { if (sort_method == TRACKING_DOPE_SORT_NAME) { @@ -3217,6 +3313,12 @@ static void tracking_dopesheet_channels_sort(MovieTracking *tracking, else if (sort_method == TRACKING_DOPE_SORT_AVERAGE_ERROR) { BLI_listbase_sort(&dopesheet->channels, channels_average_error_sort); } + else if (sort_method == TRACKING_DOPE_SORT_START) { + BLI_listbase_sort(&dopesheet->channels, channels_start_sort); + } + else if (sort_method == TRACKING_DOPE_SORT_END) { + 
BLI_listbase_sort(&dopesheet->channels, channels_end_sort); + } } } |