-rw-r--r--  source/blender/io/common/IO_abstract_hierarchy_iterator.h       |  52
-rw-r--r--  source/blender/io/common/intern/abstract_hierarchy_iterator.cc  |  89
-rw-r--r--  tests/gtests/usd/abstract_hierarchy_iterator_test.cc            | 136
3 files changed, 243 insertions(+), 34 deletions(-)
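
Before the diff itself, a minimal sketch of how the new ExportSubset / set_export_subset() API introduced below might be driven from a per-frame export loop. MyHierarchyIterator, set_frame() and export_animated_shapes are hypothetical; only ExportSubset, set_export_subset(), iterate_and_write() and release_writers() come from the patched header.

/* Hypothetical driver, not part of the patch. */
static void export_frame_range(Depsgraph *depsgraph,
                               const int frame_start,
                               const int frame_end,
                               const bool export_animated_shapes)
{
  MyHierarchyIterator iterator(depsgraph); /* hypothetical concrete subclass */

  for (int frame = frame_start; frame <= frame_end; frame++) {
    set_frame(depsgraph, frame); /* hypothetical stand-in for the real frame-change call */

    ExportSubset subset;
    subset.transforms = true;               /* transforms are rewritten on every frame */
    subset.shapes = export_animated_shapes; /* shapes only when explicitly requested */
    iterator.set_export_subset(subset);

    /* Writers created during this call always write, regardless of the subset. */
    iterator.iterate_and_write();
  }

  iterator.release_writers(); /* after all frames have been exported */
}
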
diff --git a/source/blender/io/common/IO_abstract_hierarchy_iterator.h b/source/blender/io/common/IO_abstract_hierarchy_iterator.h
index 480b72ea0cf..9930b79f802 100644
--- a/source/blender/io/common/IO_abstract_hierarchy_iterator.h
+++ b/source/blender/io/common/IO_abstract_hierarchy_iterator.h
@@ -120,6 +120,39 @@ class AbstractHierarchyWriter {
virtual bool check_is_animated(const HierarchyContext &context) const;
};
+/* Determines which subset of the writers actually gets to write. */
+struct ExportSubset {
+ bool transforms : 1;
+ bool shapes : 1;
+};
+
+/* EnsuredWriter represents an AbstractHierarchyWriter* combined with information about whether
+ * it was newly created or not. It is returned by AbstractHierarchyIterator::ensure_writer(). */
+class EnsuredWriter {
+ private:
+ AbstractHierarchyWriter *writer_;
+
+ /* Set to true when ensure_writer() did not find an existing writer and created a new one.
+ * Set to false when the writer has been re-used or when creation of a new one failed
+ * (`writer_` will be `nullptr` in that case and bool(ensured_writer) will be false). */
+ bool newly_created_;
+
+ EnsuredWriter(AbstractHierarchyWriter *writer, bool newly_created);
+
+ public:
+ EnsuredWriter();
+
+ static EnsuredWriter empty();
+ static EnsuredWriter existing(AbstractHierarchyWriter *writer);
+ static EnsuredWriter newly_created(AbstractHierarchyWriter *writer);
+
+ bool is_newly_created() const;
+
+ /* These operators make an EnsuredWriter act as an AbstractHierarchyWriter pointer. */
+ operator bool() const;
+ AbstractHierarchyWriter *operator->();
+};
+
/* AbstractHierarchyIterator iterates over objects in a dependency graph, and constructs export
* writers. These writers are then called to perform the actual writing to a USD or Alembic file.
*
@@ -148,6 +181,7 @@ class AbstractHierarchyIterator {
ExportPathMap duplisource_export_path_;
Depsgraph *depsgraph_;
WriterMap writers_;
+ ExportSubset export_subset_;
public:
explicit AbstractHierarchyIterator(Depsgraph *depsgraph);
@@ -161,6 +195,15 @@ class AbstractHierarchyIterator {
/* Release all writers. Call after all frames have been exported. */
void release_writers();
+ /* Determine which subset of writers is used for exporting.
+ * Set this before calling iterate_and_write().
+ *
+ * Note that writers are created for each iterated object, regardless of this option. When a
+ * writer is created, it also writes during the current iteration to ensure the hierarchy is
+ * complete. The `export_subset` option only takes effect when the writer already existed from a
+ * previous iteration. */
+ void set_export_subset(ExportSubset export_subset);
+
/* Convert the given name to something that is valid for the exported file format.
* This base implementation is a no-op; override in a concrete subclass. */
virtual std::string make_valid_name(const std::string &name) const;
@@ -209,10 +252,11 @@ class AbstractHierarchyIterator {
typedef AbstractHierarchyWriter *(AbstractHierarchyIterator::*create_writer_func)(
const HierarchyContext *);
- /* Ensure that a writer exists; if it doesn't, call create_func(context). The create_func
- * function should be one of the create_XXXX_writer(context) functions declared below. */
- AbstractHierarchyWriter *ensure_writer(HierarchyContext *context,
- create_writer_func create_func);
+ /* Ensure that a writer exists; if it doesn't, call create_func(context).
+ *
+ * The create_func function should be one of the create_XXXX_writer(context) functions declared
+ * below. */
+ EnsuredWriter ensure_writer(HierarchyContext *context, create_writer_func create_func);
protected:
/* Construct a valid path for the export file format. This class concatenates by using '/' as a
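
To make the contract of the new EnsuredWriter type concrete, a small illustrative snippet (not part of the patch) of the call pattern the header above establishes; the concrete uses appear in the .cc diff below.

/* Illustrative only; `context` and the shapes flag mirror the usage further down. */
EnsuredWriter ensured = ensure_writer(context, &AbstractHierarchyIterator::create_data_writer);

if (!ensured) {
  /* EnsuredWriter::empty(): the create function failed, there is nothing to write. */
  return;
}
if (ensured.is_newly_created() || export_subset_.shapes) {
  /* EnsuredWriter::newly_created(): always write once, so the exported hierarchy stays complete.
   * EnsuredWriter::existing(): write only when the active export subset asks for shapes. */
  ensured->write(*context);
}
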
diff --git a/source/blender/io/common/intern/abstract_hierarchy_iterator.cc b/source/blender/io/common/intern/abstract_hierarchy_iterator.cc
index 5a318485203..053b22970dc 100644
--- a/source/blender/io/common/intern/abstract_hierarchy_iterator.cc
+++ b/source/blender/io/common/intern/abstract_hierarchy_iterator.cc
@@ -75,6 +75,43 @@ void HierarchyContext::mark_as_not_instanced()
original_export_path.clear();
}
+EnsuredWriter::EnsuredWriter() : writer_(nullptr), newly_created_(false)
+{
+}
+
+EnsuredWriter::EnsuredWriter(AbstractHierarchyWriter *writer, bool newly_created)
+ : writer_(writer), newly_created_(newly_created)
+{
+}
+
+EnsuredWriter EnsuredWriter::empty()
+{
+ return EnsuredWriter(nullptr, false);
+}
+EnsuredWriter EnsuredWriter::existing(AbstractHierarchyWriter *writer)
+{
+ return EnsuredWriter(writer, false);
+}
+EnsuredWriter EnsuredWriter::newly_created(AbstractHierarchyWriter *writer)
+{
+ return EnsuredWriter(writer, true);
+}
+
+bool EnsuredWriter::is_newly_created() const
+{
+ return newly_created_;
+}
+
+EnsuredWriter::operator bool() const
+{
+ return writer_ != nullptr;
+}
+
+AbstractHierarchyWriter *EnsuredWriter::operator->()
+{
+ return writer_;
+}
+
AbstractHierarchyWriter::~AbstractHierarchyWriter()
{
}
@@ -105,7 +142,7 @@ bool AbstractHierarchyWriter::check_is_animated(const HierarchyContext &context)
}
AbstractHierarchyIterator::AbstractHierarchyIterator(Depsgraph *depsgraph)
- : depsgraph_(depsgraph), writers_()
+ : depsgraph_(depsgraph), writers_(), export_subset_({true, true})
{
}
@@ -132,6 +169,11 @@ void AbstractHierarchyIterator::release_writers()
writers_.clear();
}
+void AbstractHierarchyIterator::set_export_subset(ExportSubset export_subset)
+{
+ export_subset_ = export_subset;
+}
+
std::string AbstractHierarchyIterator::make_valid_name(const std::string &name) const
{
return name;
@@ -495,7 +537,6 @@ void AbstractHierarchyIterator::determine_duplication_references(
void AbstractHierarchyIterator::make_writers(const HierarchyContext *parent_context)
{
- AbstractHierarchyWriter *transform_writer = nullptr;
float parent_matrix_inv_world[4][4];
if (parent_context) {
@@ -510,18 +551,22 @@ void AbstractHierarchyIterator::make_writers(const HierarchyContext *parent_cont
copy_m4_m4(context->parent_matrix_inv_world, parent_matrix_inv_world);
// Get or create the transform writer.
- transform_writer = ensure_writer(context, &AbstractHierarchyIterator::create_transform_writer);
- if (transform_writer == nullptr) {
+ EnsuredWriter transform_writer = ensure_writer(
+ context, &AbstractHierarchyIterator::create_transform_writer);
+
+ if (!transform_writer) {
// Unable to export, so there is nothing to attach any children to; just abort this entire
// branch of the export hierarchy.
return;
}
BLI_assert(DEG_is_evaluated_object(context->object));
- /* XXX This can lead to too many XForms being written. For example, a camera writer can refuse
- * to write an orthographic camera. By the time that this is known, the XForm has already been
- * written. */
- transform_writer->write(*context);
+ if (transform_writer.is_newly_created() || export_subset_.transforms) {
+ /* XXX This can lead to too many XForms being written. For example, a camera writer can
+ * refuse to write an orthographic camera. By the time that this is known, the XForm has
+ * already been written. */
+ transform_writer->write(*context);
+ }
if (!context->weak_export) {
make_writers_particle_systems(context);
@@ -554,13 +599,16 @@ void AbstractHierarchyIterator::make_writer_object_data(const HierarchyContext *
BLI_assert(data_context.is_instance());
}
- AbstractHierarchyWriter *data_writer;
- data_writer = ensure_writer(&data_context, &AbstractHierarchyIterator::create_data_writer);
- if (data_writer == nullptr) {
+ /* Always write upon creation; otherwise writing depends on which subset is active. */
+ EnsuredWriter data_writer = ensure_writer(&data_context,
+ &AbstractHierarchyIterator::create_data_writer);
+ if (!data_writer) {
return;
}
- data_writer->write(data_context);
+ if (data_writer.is_newly_created() || export_subset_.shapes) {
+ data_writer->write(data_context);
+ }
}
void AbstractHierarchyIterator::make_writers_particle_systems(
@@ -578,7 +626,7 @@ void AbstractHierarchyIterator::make_writers_particle_systems(
make_valid_name(psys->name));
hair_context.particle_system = psys;
- AbstractHierarchyWriter *writer = nullptr;
+ EnsuredWriter writer;
switch (psys->part->type) {
case PART_HAIR:
writer = ensure_writer(&hair_context, &AbstractHierarchyIterator::create_hair_writer);
@@ -587,8 +635,12 @@ void AbstractHierarchyIterator::make_writers_particle_systems(
writer = ensure_writer(&hair_context, &AbstractHierarchyIterator::create_particle_writer);
break;
}
+ if (!writer) {
+ continue;
+ }
- if (writer != nullptr) {
+ /* Always write upon creation; otherwise writing depends on which subset is active. */
+ if (writer.is_newly_created() || export_subset_.shapes) {
writer->write(hair_context);
}
}
@@ -616,22 +668,21 @@ AbstractHierarchyWriter *AbstractHierarchyIterator::get_writer(
return it->second;
}
-AbstractHierarchyWriter *AbstractHierarchyIterator::ensure_writer(
+EnsuredWriter AbstractHierarchyIterator::ensure_writer(
HierarchyContext *context, AbstractHierarchyIterator::create_writer_func create_func)
{
AbstractHierarchyWriter *writer = get_writer(context->export_path);
if (writer != nullptr) {
- return writer;
+ return EnsuredWriter::existing(writer);
}
writer = (this->*create_func)(context);
if (writer == nullptr) {
- return nullptr;
+ return EnsuredWriter::empty();
}
writers_[context->export_path] = writer;
-
- return writer;
+ return EnsuredWriter::newly_created(writer);
}
std::string AbstractHierarchyIterator::path_concatenate(const std::string &parent_path,
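
The rule "always write upon creation; otherwise writing depends on the active subset" appears three times above (transforms, object data, particle systems). A sketch of how it could be factored into a small helper; the name should_write() is hypothetical and not part of the patch.

/* Hypothetical helper: encapsulates the write decision used for every EnsuredWriter above. */
static bool should_write(const EnsuredWriter &writer, const bool subset_flag)
{
  /* A newly created writer must write once so the exported hierarchy is complete;
   * an existing writer writes only when its subset (transforms or shapes) is enabled. */
  return writer.is_newly_created() || subset_flag;
}

/* Usage, e.g. for object data: */
if (data_writer && should_write(data_writer, export_subset_.shapes)) {
  data_writer->write(data_context);
}
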
diff --git a/tests/gtests/usd/abstract_hierarchy_iterator_test.cc b/tests/gtests/usd/abstract_hierarchy_iterator_test.cc
index f2e8b09b8d3..1ca0ad93470 100644
--- a/tests/gtests/usd/abstract_hierarchy_iterator_test.cc
+++ b/tests/gtests/usd/abstract_hierarchy_iterator_test.cc
@@ -30,16 +30,16 @@ extern "C" {
/* Mapping from ID.name to set of export hierarchy paths. Duplicated objects can be exported
 * multiple times with different export paths, hence the set. */
-typedef std::map<std::string, std::set<std::string>> created_writers;
+typedef std::map<std::string, std::set<std::string>> used_writers;
using namespace blender::io;
class TestHierarchyWriter : public AbstractHierarchyWriter {
public:
std::string writer_type;
- created_writers &writers_map;
+ used_writers &writers_map;
- TestHierarchyWriter(const std::string &writer_type, created_writers &writers_map)
+ TestHierarchyWriter(const std::string &writer_type, used_writers &writers_map)
: writer_type(writer_type), writers_map(writers_map)
{
}
@@ -47,7 +47,7 @@ class TestHierarchyWriter : public AbstractHierarchyWriter {
void write(HierarchyContext &context) override
{
const char *id_name = context.object->id.name;
- created_writers::mapped_type &writers = writers_map[id_name];
+ used_writers::mapped_type &writers = writers_map[id_name];
if (writers.find(context.export_path) != writers.end()) {
ADD_FAILURE() << "Unexpectedly found another " << writer_type << " writer for " << id_name
@@ -57,7 +57,7 @@ class TestHierarchyWriter : public AbstractHierarchyWriter {
}
};
-void debug_print_writers(const char *label, const created_writers &writers_map)
+void debug_print_writers(const char *label, const used_writers &writers_map)
{
printf("%s:\n", label);
for (auto idname_writers : writers_map) {
@@ -70,10 +70,10 @@ void debug_print_writers(const char *label, const created_writers &writers_map)
class TestingHierarchyIterator : public AbstractHierarchyIterator {
public: /* Public so that the test cases can directly inspect the created writers. */
- created_writers transform_writers;
- created_writers data_writers;
- created_writers hair_writers;
- created_writers particle_writers;
+ used_writers transform_writers;
+ used_writers data_writers;
+ used_writers hair_writers;
+ used_writers particle_writers;
public:
explicit TestingHierarchyIterator(Depsgraph *depsgraph) : AbstractHierarchyIterator(depsgraph)
@@ -151,7 +151,7 @@ TEST_F(USDHierarchyIteratorTest, ExportHierarchyTest)
iterator->iterate_and_write();
// Mapping from object name to set of export paths.
- created_writers expected_transforms = {
+ used_writers expected_transforms = {
{"OBCamera", {"/Camera"}},
{"OBDupli1", {"/Dupli1"}},
{"OBDupli2", {"/ParentOfDupli2/Dupli2"}},
@@ -177,7 +177,7 @@ TEST_F(USDHierarchyIteratorTest, ExportHierarchyTest)
{"OBParentOfDupli2", {"/ParentOfDupli2"}}};
EXPECT_EQ(expected_transforms, iterator->transform_writers);
- created_writers expected_data = {
+ used_writers expected_data = {
{"OBCamera", {"/Camera/Camera"}},
{"OBGEO_Ear_L",
{"/Dupli1/GEO_Head-0/GEO_Ear_L-1/Ear",
@@ -204,4 +204,118 @@ TEST_F(USDHierarchyIteratorTest, ExportHierarchyTest)
// The scene has no hair or particle systems.
EXPECT_EQ(0, iterator->hair_writers.size());
EXPECT_EQ(0, iterator->particle_writers.size());
+
+ // On the second iteration, everything should be written as well.
+ // This tests the default value of iterator->export_subset_.
+ iterator->transform_writers.clear();
+ iterator->data_writers.clear();
+ iterator->iterate_and_write();
+ EXPECT_EQ(expected_transforms, iterator->transform_writers);
+ EXPECT_EQ(expected_data, iterator->data_writers);
+}
+
+TEST_F(USDHierarchyIteratorTest, ExportSubsetTest)
+{
+ // The scene has no hair or particle systems; that case is already covered by
+ // ExportHierarchyTest, so it is not repeated here. Update this test when hair & particle
+ // systems are included.
+
+ /* Load the test blend file. */
+ if (!blendfile_load("usd/usd_hierarchy_export_test.blend")) {
+ return;
+ }
+ depsgraph_create(DAG_EVAL_RENDER);
+ iterator_create();
+
+ // Mapping from object name to set of export paths.
+ used_writers expected_transforms = {
+ {"OBCamera", {"/Camera"}},
+ {"OBDupli1", {"/Dupli1"}},
+ {"OBDupli2", {"/ParentOfDupli2/Dupli2"}},
+ {"OBGEO_Ear_L",
+ {"/Dupli1/GEO_Head-0/GEO_Ear_L-1",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head/GEO_Ear_L",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0/GEO_Ear_L-1"}},
+ {"OBGEO_Ear_R",
+ {"/Dupli1/GEO_Head-0/GEO_Ear_R-2",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head/GEO_Ear_R",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0/GEO_Ear_R-2"}},
+ {"OBGEO_Head",
+ {"/Dupli1/GEO_Head-0",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0"}},
+ {"OBGEO_Nose",
+ {"/Dupli1/GEO_Head-0/GEO_Nose-3",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head/GEO_Nose",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0/GEO_Nose-3"}},
+ {"OBGround plane", {"/Ground plane"}},
+ {"OBOutsideDupliGrandParent", {"/Ground plane/OutsideDupliGrandParent"}},
+ {"OBOutsideDupliParent", {"/Ground plane/OutsideDupliGrandParent/OutsideDupliParent"}},
+ {"OBParentOfDupli2", {"/ParentOfDupli2"}}};
+
+ used_writers expected_data = {
+ {"OBCamera", {"/Camera/Camera"}},
+ {"OBGEO_Ear_L",
+ {"/Dupli1/GEO_Head-0/GEO_Ear_L-1/Ear",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head/GEO_Ear_L/Ear",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0/GEO_Ear_L-1/Ear"}},
+ {"OBGEO_Ear_R",
+ {"/Dupli1/GEO_Head-0/GEO_Ear_R-2/Ear",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head/GEO_Ear_R/Ear",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0/GEO_Ear_R-2/Ear"}},
+ {"OBGEO_Head",
+ {"/Dupli1/GEO_Head-0/Face",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head/Face",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0/Face"}},
+ {"OBGEO_Nose",
+ {"/Dupli1/GEO_Head-0/GEO_Nose-3/Nose",
+ "/Ground plane/OutsideDupliGrandParent/OutsideDupliParent/GEO_Head/GEO_Nose/Nose",
+ "/ParentOfDupli2/Dupli2/GEO_Head-0/GEO_Nose-3/Nose"}},
+ {"OBGround plane", {"/Ground plane/Plane"}},
+ {"OBParentOfDupli2", {"/ParentOfDupli2/Icosphere"}},
+ };
+
+ // Even when asking for an export of only transforms, everything should be exported on the
+ // first frame.
+ iterator->set_export_subset({
+ .transforms = true,
+ .shapes = false,
+ });
+ iterator->iterate_and_write();
+ EXPECT_EQ(expected_transforms, iterator->transform_writers);
+ EXPECT_EQ(expected_data, iterator->data_writers);
+
+ // Clear data to prepare for the next iteration.
+ iterator->transform_writers.clear();
+ iterator->data_writers.clear();
+
+ // Second iteration, should only write transforms now.
+ iterator->iterate_and_write();
+ EXPECT_EQ(expected_transforms, iterator->transform_writers);
+ EXPECT_EQ(0, iterator->data_writers.size());
+
+ // Clear data to prepare for the next iteration.
+ iterator->transform_writers.clear();
+ iterator->data_writers.clear();
+
+ // Third iteration, should only write data now.
+ iterator->set_export_subset({
+ .transforms = false,
+ .shapes = true,
+ });
+ iterator->iterate_and_write();
+ EXPECT_EQ(0, iterator->transform_writers.size());
+ EXPECT_EQ(expected_data, iterator->data_writers);
+
+ // Clear data to prepare for the next iteration.
+ iterator->transform_writers.clear();
+ iterator->data_writers.clear();
+
+ // Fourth iteration, should export everything now.
+ iterator->set_export_subset({
+ .transforms = true,
+ .shapes = true,
+ });
+ iterator->iterate_and_write();
+ EXPECT_EQ(expected_transforms, iterator->transform_writers);
+ EXPECT_EQ(expected_data, iterator->data_writers);
}