git.blender.org/blender.git
Diffstat (limited to 'source/blender/blenkernel/intern/node_tree_update.cc')
-rw-r--r--  source/blender/blenkernel/intern/node_tree_update.cc | 540
1 file changed, 247 insertions(+), 293 deletions(-)
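This commit ports node_tree_update.cc from the NOD_node_tree_ref.hh wrappers (NodeTreeRef, NodeRef, SocketRef) to the topology cache stored directly on bNodeTree. The substitution is mechanical throughout the diff: functions take bNodeTree/bNode/bNodeSocket directly, call ensure_topology_cache() before any topology query, and index per-socket state arrays with index_in_tree() instead of SocketRef::id(). A minimal before/after sketch of that pattern follows; the helper name count_field_inputs is hypothetical and only strings together calls that appear in the diff.

/* Sketch only: illustrates the API migration applied in this diff.
 * `count_field_inputs` is a made-up helper, not part of the change. */

/* Before: topology queries went through the NodeTreeRef wrappers. */
static int count_field_inputs(const NodeTreeRef &tree)
{
  int count = 0;
  for (const NodeRef *node : tree.nodes()) {
    for (const InputSocketRef *socket : node->inputs()) {
      if (is_field_socket_type(*socket)) {
        count++;
      }
    }
  }
  return count;
}

/* After: the same queries live on bNodeTree/bNode/bNodeSocket and are valid
 * once the lazily built topology cache has been ensured. */
static int count_field_inputs(const bNodeTree &tree)
{
  tree.ensure_topology_cache();
  int count = 0;
  for (const bNode *node : tree.all_nodes()) {
    for (const bNodeSocket *socket : node->input_sockets()) {
      if (is_field_socket_type(*socket)) {
        count++;
      }
    }
  }
  return count;
}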
diff --git a/source/blender/blenkernel/intern/node_tree_update.cc b/source/blender/blenkernel/intern/node_tree_update.cc
index 019ab114b83..b2caaa912d7 100644
--- a/source/blender/blenkernel/intern/node_tree_update.cc
+++ b/source/blender/blenkernel/intern/node_tree_update.cc
@@ -5,6 +5,7 @@
#include "BLI_noise.hh"
#include "BLI_set.hh"
#include "BLI_stack.hh"
+#include "BLI_timeit.hh"
#include "BLI_vector_set.hh"
#include "DNA_anim_types.h"
@@ -21,7 +22,6 @@
#include "MOD_nodes.h"
#include "NOD_node_declaration.hh"
-#include "NOD_node_tree_ref.hh"
#include "NOD_texture.h"
#include "DEG_depsgraph_query.h"
@@ -50,6 +50,7 @@ enum eNodeTreeChangedFlag {
static void add_tree_tag(bNodeTree *ntree, const eNodeTreeChangedFlag flag)
{
ntree->runtime->changed_flag |= flag;
+ ntree->runtime->topology_cache_is_dirty = true;
}
static void add_node_tag(bNodeTree *ntree, bNode *node, const eNodeTreeChangedFlag flag)
@@ -73,28 +74,32 @@ static bool is_field_socket_type(eNodeSocketDatatype type)
return ELEM(type, SOCK_FLOAT, SOCK_INT, SOCK_BOOLEAN, SOCK_VECTOR, SOCK_RGBA);
}
-static bool is_field_socket_type(const SocketRef &socket)
+static bool is_field_socket_type(const bNodeSocket &socket)
{
- return is_field_socket_type((eNodeSocketDatatype)socket.typeinfo()->type);
+ return is_field_socket_type((eNodeSocketDatatype)socket.typeinfo->type);
}
-static InputSocketFieldType get_interface_input_field_type(const NodeRef &node,
- const InputSocketRef &socket)
+static InputSocketFieldType get_interface_input_field_type(const bNode &node,
+ const bNodeSocket &socket)
{
if (!is_field_socket_type(socket)) {
return InputSocketFieldType::None;
}
- if (node.is_reroute_node()) {
+ if (node.type == NODE_REROUTE) {
return InputSocketFieldType::IsSupported;
}
- if (node.is_group_output_node()) {
+ if (node.type == NODE_GROUP_OUTPUT) {
/* Outputs always support fields when the data type is correct. */
return InputSocketFieldType::IsSupported;
}
- if (node.is_undefined()) {
+ if (node.typeinfo == &NodeTypeUndefined) {
+ return InputSocketFieldType::None;
+ }
+ if (node.type == NODE_CUSTOM) {
return InputSocketFieldType::None;
}
+ /* TODO: Ensure declaration exists. */
const NodeDeclaration *node_decl = node.declaration();
/* Node declarations should be implemented for nodes involved here. */
@@ -113,22 +118,25 @@ static InputSocketFieldType get_interface_input_field_type(const NodeRef &node,
return field_type;
}
-static OutputFieldDependency get_interface_output_field_dependency(const NodeRef &node,
- const OutputSocketRef &socket)
+static OutputFieldDependency get_interface_output_field_dependency(const bNode &node,
+ const bNodeSocket &socket)
{
if (!is_field_socket_type(socket)) {
/* Non-field sockets always output data. */
return OutputFieldDependency::ForDataSource();
}
- if (node.is_reroute_node()) {
+ if (node.type == NODE_REROUTE) {
/* The reroute just forwards what is passed in. */
return OutputFieldDependency::ForDependentField();
}
- if (node.is_group_input_node()) {
+ if (node.type == NODE_GROUP_INPUT) {
/* Input nodes get special treatment in #determine_group_input_states. */
return OutputFieldDependency::ForDependentField();
}
- if (node.is_undefined()) {
+ if (node.typeinfo == &NodeTypeUndefined) {
+ return OutputFieldDependency::ForDataSource();
+ }
+ if (node.type == NODE_CUSTOM) {
return OutputFieldDependency::ForDataSource();
}
@@ -147,12 +155,13 @@ static OutputFieldDependency get_interface_output_field_dependency(const NodeRef
return socket_decl.output_field_dependency();
}
-static FieldInferencingInterface get_dummy_field_inferencing_interface(const NodeRef &node)
+static FieldInferencingInterface get_dummy_field_inferencing_interface(const bNode &node)
{
FieldInferencingInterface inferencing_interface;
- inferencing_interface.inputs.append_n_times(InputSocketFieldType::None, node.inputs().size());
+ inferencing_interface.inputs.append_n_times(InputSocketFieldType::None,
+ node.input_sockets().size());
inferencing_interface.outputs.append_n_times(OutputFieldDependency::ForDataSource(),
- node.outputs().size());
+ node.output_sockets().size());
return inferencing_interface;
}
@@ -161,11 +170,11 @@ static FieldInferencingInterface get_dummy_field_inferencing_interface(const Nod
* In the future, this information can be stored in the node declaration. This would allow this
* function to return a reference, making it more efficient.
*/
-static FieldInferencingInterface get_node_field_inferencing_interface(const NodeRef &node)
+static FieldInferencingInterface get_node_field_inferencing_interface(const bNode &node)
{
/* Node groups already reference all required information, so just return that. */
- if (node.is_group_node()) {
- bNodeTree *group = (bNodeTree *)node.bnode()->id;
+ if (node.is_group()) {
+ bNodeTree *group = (bNodeTree *)node.id;
if (group == nullptr) {
return FieldInferencingInterface();
}
@@ -181,11 +190,11 @@ static FieldInferencingInterface get_node_field_inferencing_interface(const Node
}
FieldInferencingInterface inferencing_interface;
- for (const InputSocketRef *input_socket : node.inputs()) {
+ for (const bNodeSocket *input_socket : node.input_sockets()) {
inferencing_interface.inputs.append(get_interface_input_field_type(node, *input_socket));
}
- for (const OutputSocketRef *output_socket : node.outputs()) {
+ for (const bNodeSocket *output_socket : node.output_sockets()) {
inferencing_interface.outputs.append(
get_interface_output_field_dependency(node, *output_socket));
}
@@ -209,11 +218,11 @@ struct SocketFieldState {
bool requires_single = false;
};
-static Vector<const InputSocketRef *> gather_input_socket_dependencies(
- const OutputFieldDependency &field_dependency, const NodeRef &node)
+static Vector<const bNodeSocket *> gather_input_socket_dependencies(
+ const OutputFieldDependency &field_dependency, const bNode &node)
{
const OutputSocketFieldType type = field_dependency.field_type();
- Vector<const InputSocketRef *> input_sockets;
+ Vector<const bNodeSocket *> input_sockets;
switch (type) {
case OutputSocketFieldType::FieldSource:
case OutputSocketFieldType::None: {
@@ -221,13 +230,13 @@ static Vector<const InputSocketRef *> gather_input_socket_dependencies(
}
case OutputSocketFieldType::DependentField: {
/* This output depends on all inputs. */
- input_sockets.extend(node.inputs());
+ input_sockets.extend(node.input_sockets());
break;
}
case OutputSocketFieldType::PartiallyDependent: {
/* This output depends only on a few inputs. */
for (const int i : field_dependency.linked_input_indices()) {
- input_sockets.append(&node.input(i));
+ input_sockets.append(&node.input_socket(i));
}
break;
}
@@ -240,8 +249,7 @@ static Vector<const InputSocketRef *> gather_input_socket_dependencies(
* to figure out if it is always a field or if it depends on any group inputs.
*/
static OutputFieldDependency find_group_output_dependencies(
- const InputSocketRef &group_output_socket,
- const Span<SocketFieldState> field_state_by_socket_id)
+ const bNodeSocket &group_output_socket, const Span<SocketFieldState> field_state_by_socket_id)
{
if (!is_field_socket_type(group_output_socket)) {
return OutputFieldDependency::ForDataSource();
@@ -249,8 +257,8 @@ static OutputFieldDependency find_group_output_dependencies(
/* Use a Set here instead of an array indexed by socket id, because we may only need to look at
* very few sockets. */
- Set<const InputSocketRef *> handled_sockets;
- Stack<const InputSocketRef *> sockets_to_check;
+ Set<const bNodeSocket *> handled_sockets;
+ Stack<const bNodeSocket *> sockets_to_check;
handled_sockets.add(&group_output_socket);
sockets_to_check.push(&group_output_socket);
@@ -259,20 +267,21 @@ static OutputFieldDependency find_group_output_dependencies(
Vector<int> linked_input_indices;
while (!sockets_to_check.is_empty()) {
- const InputSocketRef *input_socket = sockets_to_check.pop();
+ const bNodeSocket *input_socket = sockets_to_check.pop();
if (!input_socket->is_directly_linked() &&
- !field_state_by_socket_id[input_socket->id()].is_single) {
+ !field_state_by_socket_id[input_socket->index_in_tree()].is_single) {
/* This socket uses a field as input by default. */
return OutputFieldDependency::ForFieldSource();
}
- for (const OutputSocketRef *origin_socket : input_socket->directly_linked_sockets()) {
- const NodeRef &origin_node = origin_socket->node();
- const SocketFieldState &origin_state = field_state_by_socket_id[origin_socket->id()];
+ for (const bNodeSocket *origin_socket : input_socket->directly_linked_sockets()) {
+ const bNode &origin_node = origin_socket->owner_node();
+ const SocketFieldState &origin_state =
+ field_state_by_socket_id[origin_socket->index_in_tree()];
if (origin_state.is_field_source) {
- if (origin_node.is_group_input_node()) {
+ if (origin_node.type == NODE_GROUP_INPUT) {
/* Found a group input that the group output depends on. */
linked_input_indices.append_non_duplicates(origin_socket->index());
}
@@ -288,12 +297,12 @@ static OutputFieldDependency find_group_output_dependencies(
inferencing_interface.outputs[origin_socket->index()];
/* Propagate search further to the left. */
- for (const InputSocketRef *origin_input_socket :
+ for (const bNodeSocket *origin_input_socket :
gather_input_socket_dependencies(field_dependency, origin_node)) {
if (!origin_input_socket->is_available()) {
continue;
}
- if (!field_state_by_socket_id[origin_input_socket->id()].is_single) {
+ if (!field_state_by_socket_id[origin_input_socket->index_in_tree()].is_single) {
if (handled_sockets.add(origin_input_socket)) {
sockets_to_check.push(origin_input_socket);
}
@@ -306,17 +315,16 @@ static OutputFieldDependency find_group_output_dependencies(
}
static void propagate_data_requirements_from_right_to_left(
- const NodeTreeRef &tree, const MutableSpan<SocketFieldState> field_state_by_socket_id)
+ const bNodeTree &tree, const MutableSpan<SocketFieldState> field_state_by_socket_id)
{
- const NodeTreeRef::ToposortResult toposort_result = tree.toposort(
- NodeTreeRef::ToposortDirection::RightToLeft);
+ const Span<const bNode *> toposort_result = tree.toposort_right_to_left();
- for (const NodeRef *node : toposort_result.sorted_nodes) {
+ for (const bNode *node : toposort_result) {
const FieldInferencingInterface inferencing_interface = get_node_field_inferencing_interface(
*node);
- for (const OutputSocketRef *output_socket : node->outputs()) {
- SocketFieldState &state = field_state_by_socket_id[output_socket->id()];
+ for (const bNodeSocket *output_socket : node->output_sockets()) {
+ SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
const OutputFieldDependency &field_dependency =
inferencing_interface.outputs[output_socket->index()];
@@ -332,17 +340,18 @@ static void propagate_data_requirements_from_right_to_left(
/* The output is required to be a single value when it is connected to any input that does
* not support fields. */
- for (const InputSocketRef *target_socket : output_socket->directly_linked_sockets()) {
+ for (const bNodeSocket *target_socket : output_socket->directly_linked_sockets()) {
if (target_socket->is_available()) {
- state.requires_single |= field_state_by_socket_id[target_socket->id()].requires_single;
+ state.requires_single |=
+ field_state_by_socket_id[target_socket->index_in_tree()].requires_single;
}
}
if (state.requires_single) {
bool any_input_is_field_implicitly = false;
- const Vector<const InputSocketRef *> connected_inputs = gather_input_socket_dependencies(
+ const Vector<const bNodeSocket *> connected_inputs = gather_input_socket_dependencies(
field_dependency, *node);
- for (const InputSocketRef *input_socket : connected_inputs) {
+ for (const bNodeSocket *input_socket : connected_inputs) {
if (!input_socket->is_available()) {
continue;
}
@@ -361,16 +370,16 @@ static void propagate_data_requirements_from_right_to_left(
else {
/* If the output is required to be a single value, the connected inputs in the same node
* must not be fields as well. */
- for (const InputSocketRef *input_socket : connected_inputs) {
- field_state_by_socket_id[input_socket->id()].requires_single = true;
+ for (const bNodeSocket *input_socket : connected_inputs) {
+ field_state_by_socket_id[input_socket->index_in_tree()].requires_single = true;
}
}
}
}
/* Some inputs do not require fields independent of what the outputs are connected to. */
- for (const InputSocketRef *input_socket : node->inputs()) {
- SocketFieldState &state = field_state_by_socket_id[input_socket->id()];
+ for (const bNodeSocket *input_socket : node->input_sockets()) {
+ SocketFieldState &state = field_state_by_socket_id[input_socket->index_in_tree()];
if (inferencing_interface.inputs[input_socket->index()] == InputSocketFieldType::None) {
state.requires_single = true;
state.is_always_single = true;
@@ -380,14 +389,14 @@ static void propagate_data_requirements_from_right_to_left(
}
static void determine_group_input_states(
- const NodeTreeRef &tree,
+ const bNodeTree &tree,
FieldInferencingInterface &new_inferencing_interface,
const MutableSpan<SocketFieldState> field_state_by_socket_id)
{
{
/* Non-field inputs never support fields. */
int index;
- LISTBASE_FOREACH_INDEX (bNodeSocket *, group_input, &tree.btree()->inputs, index) {
+ LISTBASE_FOREACH_INDEX (bNodeSocket *, group_input, &tree.inputs, index) {
if (!is_field_socket_type((eNodeSocketDatatype)group_input->type)) {
new_inferencing_interface.inputs[index] = InputSocketFieldType::None;
}
@@ -395,18 +404,18 @@ static void determine_group_input_states(
}
/* Check if group inputs are required to be single values, because they are (indirectly)
* connected to some socket that does not support fields. */
- for (const NodeRef *node : tree.nodes_by_type("NodeGroupInput")) {
- for (const OutputSocketRef *output_socket : node->outputs().drop_back(1)) {
- SocketFieldState &state = field_state_by_socket_id[output_socket->id()];
+ for (const bNode *node : tree.nodes_by_type("NodeGroupInput")) {
+ for (const bNodeSocket *output_socket : node->output_sockets().drop_back(1)) {
+ SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
if (state.requires_single) {
new_inferencing_interface.inputs[output_socket->index()] = InputSocketFieldType::None;
}
}
}
/* If an input does not support fields, this should be reflected in all Group Input nodes. */
- for (const NodeRef *node : tree.nodes_by_type("NodeGroupInput")) {
- for (const OutputSocketRef *output_socket : node->outputs().drop_back(1)) {
- SocketFieldState &state = field_state_by_socket_id[output_socket->id()];
+ for (const bNode *node : tree.nodes_by_type("NodeGroupInput")) {
+ for (const bNodeSocket *output_socket : node->output_sockets().drop_back(1)) {
+ SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
const bool supports_field = new_inferencing_interface.inputs[output_socket->index()] !=
InputSocketFieldType::None;
if (supports_field) {
@@ -417,19 +426,19 @@ static void determine_group_input_states(
state.requires_single = true;
}
}
- SocketFieldState &dummy_socket_state = field_state_by_socket_id[node->outputs().last()->id()];
+ SocketFieldState &dummy_socket_state =
+ field_state_by_socket_id[node->output_sockets().last()->index_in_tree()];
dummy_socket_state.requires_single = true;
}
}
static void propagate_field_status_from_left_to_right(
- const NodeTreeRef &tree, const MutableSpan<SocketFieldState> field_state_by_socket_id)
+ const bNodeTree &tree, const MutableSpan<SocketFieldState> field_state_by_socket_id)
{
- const NodeTreeRef::ToposortResult toposort_result = tree.toposort(
- NodeTreeRef::ToposortDirection::LeftToRight);
+ const Span<const bNode *> toposort_result = tree.toposort_left_to_right();
- for (const NodeRef *node : toposort_result.sorted_nodes) {
- if (node->is_group_input_node()) {
+ for (const bNode *node : toposort_result) {
+ if (node->type == NODE_GROUP_INPUT) {
continue;
}
@@ -437,22 +446,22 @@ static void propagate_field_status_from_left_to_right(
*node);
/* Update field state of input sockets, also taking into account linked origin sockets. */
- for (const InputSocketRef *input_socket : node->inputs()) {
- SocketFieldState &state = field_state_by_socket_id[input_socket->id()];
+ for (const bNodeSocket *input_socket : node->input_sockets()) {
+ SocketFieldState &state = field_state_by_socket_id[input_socket->index_in_tree()];
if (state.is_always_single) {
state.is_single = true;
continue;
}
state.is_single = true;
- if (input_socket->directly_linked_sockets().is_empty()) {
+ if (!input_socket->is_directly_linked()) {
if (inferencing_interface.inputs[input_socket->index()] ==
InputSocketFieldType::Implicit) {
state.is_single = false;
}
}
else {
- for (const OutputSocketRef *origin_socket : input_socket->directly_linked_sockets()) {
- if (!field_state_by_socket_id[origin_socket->id()].is_single) {
+ for (const bNodeSocket *origin_socket : input_socket->directly_linked_sockets()) {
+ if (!field_state_by_socket_id[origin_socket->index_in_tree()].is_single) {
state.is_single = false;
break;
}
@@ -461,8 +470,8 @@ static void propagate_field_status_from_left_to_right(
}
/* Update field state of output sockets, also taking into account input sockets. */
- for (const OutputSocketRef *output_socket : node->outputs()) {
- SocketFieldState &state = field_state_by_socket_id[output_socket->id()];
+ for (const bNodeSocket *output_socket : node->output_sockets()) {
+ SocketFieldState &state = field_state_by_socket_id[output_socket->index_in_tree()];
const OutputFieldDependency &field_dependency =
inferencing_interface.outputs[output_socket->index()];
@@ -478,12 +487,12 @@ static void propagate_field_status_from_left_to_right(
}
case OutputSocketFieldType::PartiallyDependent:
case OutputSocketFieldType::DependentField: {
- for (const InputSocketRef *input_socket :
+ for (const bNodeSocket *input_socket :
gather_input_socket_dependencies(field_dependency, *node)) {
if (!input_socket->is_available()) {
continue;
}
- if (!field_state_by_socket_id[input_socket->id()].is_single) {
+ if (!field_state_by_socket_id[input_socket->index_in_tree()].is_single) {
state.is_single = false;
break;
}
@@ -495,17 +504,18 @@ static void propagate_field_status_from_left_to_right(
}
}
-static void determine_group_output_states(const NodeTreeRef &tree,
+static void determine_group_output_states(const bNodeTree &tree,
FieldInferencingInterface &new_inferencing_interface,
const Span<SocketFieldState> field_state_by_socket_id)
{
- for (const NodeRef *group_output_node : tree.nodes_by_type("NodeGroupOutput")) {
+ for (const bNode *group_output_node : tree.nodes_by_type("NodeGroupOutput")) {
/* Ignore inactive group output nodes. */
- if (!(group_output_node->bnode()->flag & NODE_DO_OUTPUT)) {
+ if (!(group_output_node->flag & NODE_DO_OUTPUT)) {
continue;
}
/* Determine dependencies of all group outputs. */
- for (const InputSocketRef *group_output_socket : group_output_node->inputs().drop_back(1)) {
+ for (const bNodeSocket *group_output_socket :
+ group_output_node->input_sockets().drop_back(1)) {
OutputFieldDependency field_dependency = find_group_output_dependencies(
*group_output_socket, field_state_by_socket_id);
new_inferencing_interface.outputs[group_output_socket->index()] = std::move(
@@ -515,7 +525,7 @@ static void determine_group_output_states(const NodeTreeRef &tree,
}
}
-static void update_socket_shapes(const NodeTreeRef &tree,
+static void update_socket_shapes(const bNodeTree &tree,
const Span<SocketFieldState> field_state_by_socket_id)
{
const eNodeSocketDisplayShape requires_data_shape = SOCK_DISPLAY_SHAPE_CIRCLE;
@@ -535,32 +545,30 @@ static void update_socket_shapes(const NodeTreeRef &tree,
return data_but_can_be_field_shape;
};
- for (const InputSocketRef *socket : tree.input_sockets()) {
- bNodeSocket *bsocket = socket->bsocket();
- const SocketFieldState &state = field_state_by_socket_id[socket->id()];
- bsocket->display_shape = get_shape_for_state(state);
+ for (const bNodeSocket *socket : tree.all_input_sockets()) {
+ const SocketFieldState &state = field_state_by_socket_id[socket->index_in_tree()];
+ const_cast<bNodeSocket *>(socket)->display_shape = get_shape_for_state(state);
}
- for (const OutputSocketRef *socket : tree.output_sockets()) {
- bNodeSocket *bsocket = socket->bsocket();
- const SocketFieldState &state = field_state_by_socket_id[socket->id()];
- bsocket->display_shape = get_shape_for_state(state);
+ for (const bNodeSocket *socket : tree.all_sockets()) {
+ const SocketFieldState &state = field_state_by_socket_id[socket->index_in_tree()];
+ const_cast<bNodeSocket *>(socket)->display_shape = get_shape_for_state(state);
}
}
-static bool update_field_inferencing(const NodeTreeRef &tree)
+static bool update_field_inferencing(const bNodeTree &tree)
{
- bNodeTree &btree = *tree.btree();
+ tree.ensure_topology_cache();
/* Create new inferencing interface for this node group. */
std::unique_ptr<FieldInferencingInterface> new_inferencing_interface =
std::make_unique<FieldInferencingInterface>();
- new_inferencing_interface->inputs.resize(BLI_listbase_count(&btree.inputs),
+ new_inferencing_interface->inputs.resize(BLI_listbase_count(&tree.inputs),
InputSocketFieldType::IsSupported);
- new_inferencing_interface->outputs.resize(BLI_listbase_count(&btree.outputs),
+ new_inferencing_interface->outputs.resize(BLI_listbase_count(&tree.outputs),
OutputFieldDependency::ForDataSource());
/* Keep track of the state of all sockets. The index into this array is #SocketRef::id(). */
- Array<SocketFieldState> field_state_by_socket_id(tree.sockets().size());
+ Array<SocketFieldState> field_state_by_socket_id(tree.all_sockets().size());
propagate_data_requirements_from_right_to_left(tree, field_state_by_socket_id);
determine_group_input_states(tree, *new_inferencing_interface, field_state_by_socket_id);
@@ -569,10 +577,10 @@ static bool update_field_inferencing(const NodeTreeRef &tree)
update_socket_shapes(tree, field_state_by_socket_id);
/* Update the previous group interface. */
- const bool group_interface_changed = !btree.runtime->field_inferencing_interface ||
- *btree.runtime->field_inferencing_interface !=
+ const bool group_interface_changed = !tree.runtime->field_inferencing_interface ||
+ *tree.runtime->field_inferencing_interface !=
*new_inferencing_interface;
- btree.runtime->field_inferencing_interface = std::move(new_inferencing_interface);
+ tree.runtime->field_inferencing_interface = std::move(new_inferencing_interface);
return group_interface_changed;
}
@@ -973,29 +981,22 @@ class NodeTreeMainUpdater {
{
TreeUpdateResult result;
- /* Use a #NodeTreeRef to speedup certain queries. It is rebuilt whenever the node tree topology
- * changes, which typically happens zero or one times during the entire update of the node
- * tree. */
- std::unique_ptr<NodeTreeRef> tree_ref;
- this->ensure_tree_ref(ntree, tree_ref);
-
- this->update_socket_link_and_use(*tree_ref);
- this->update_individual_nodes(ntree, tree_ref);
- this->update_internal_links(ntree, tree_ref);
- this->update_generic_callback(ntree, tree_ref);
+ this->update_socket_link_and_use(ntree);
+ this->update_individual_nodes(ntree);
+ this->update_internal_links(ntree);
+ this->update_generic_callback(ntree);
this->remove_unused_previews_when_necessary(ntree);
- this->ensure_tree_ref(ntree, tree_ref);
- this->propagate_runtime_flags(*tree_ref);
+ this->propagate_runtime_flags(ntree);
if (ntree.type == NTREE_GEOMETRY) {
- if (node_field_inferencing::update_field_inferencing(*tree_ref)) {
+ if (node_field_inferencing::update_field_inferencing(ntree)) {
result.interface_changed = true;
}
}
- result.output_changed = this->check_if_output_changed(*tree_ref);
+ result.output_changed = this->check_if_output_changed(ntree);
- this->update_socket_link_and_use(*tree_ref);
+ this->update_socket_link_and_use(ntree);
this->update_node_levels(ntree);
this->update_link_validation(ntree);
@@ -1015,86 +1016,70 @@ class NodeTreeMainUpdater {
return result;
}
- void ensure_tree_ref(bNodeTree &ntree, std::unique_ptr<NodeTreeRef> &tree_ref)
- {
- if (!tree_ref) {
- tree_ref = std::make_unique<NodeTreeRef>(&ntree);
- }
- }
-
- void update_socket_link_and_use(const NodeTreeRef &tree)
+ void update_socket_link_and_use(bNodeTree &tree)
{
- for (const InputSocketRef *socket : tree.input_sockets()) {
- bNodeSocket *bsocket = socket->bsocket();
+ tree.ensure_topology_cache();
+ for (bNodeSocket *socket : tree.all_input_sockets()) {
if (socket->directly_linked_links().is_empty()) {
- bsocket->link = nullptr;
+ socket->link = nullptr;
}
else {
- bsocket->link = socket->directly_linked_links()[0]->blink();
+ socket->link = socket->directly_linked_links()[0];
}
}
this->update_socket_used_tags(tree);
}
- void update_socket_used_tags(const NodeTreeRef &tree)
+ void update_socket_used_tags(bNodeTree &tree)
{
- for (const SocketRef *socket : tree.sockets()) {
- bNodeSocket *bsocket = socket->bsocket();
- bsocket->flag &= ~SOCK_IN_USE;
- for (const LinkRef *link : socket->directly_linked_links()) {
+ tree.ensure_topology_cache();
+ for (bNodeSocket *socket : tree.all_sockets()) {
+ socket->flag &= ~SOCK_IN_USE;
+ for (const bNodeLink *link : socket->directly_linked_links()) {
if (!link->is_muted()) {
- bsocket->flag |= SOCK_IN_USE;
+ socket->flag |= SOCK_IN_USE;
break;
}
}
}
}
- void update_individual_nodes(bNodeTree &ntree, std::unique_ptr<NodeTreeRef> &tree_ref)
+ void update_individual_nodes(bNodeTree &ntree)
{
- /* Iterate over nodes instead of #NodeTreeRef, because the #tree_ref might be outdated after
- * some update functions. */
- LISTBASE_FOREACH (bNode *, bnode, &ntree.nodes) {
- this->ensure_tree_ref(ntree, tree_ref);
- const NodeRef &node = *tree_ref->find_node(*bnode);
- if (this->should_update_individual_node(node)) {
- const uint32_t old_changed_flag = ntree.runtime->changed_flag;
- ntree.runtime->changed_flag = NTREE_CHANGED_NOTHING;
-
- /* This may set #ntree.runtime->changed_flag which is detected below. */
- this->update_individual_node(node);
-
- if (ntree.runtime->changed_flag != NTREE_CHANGED_NOTHING) {
- /* The tree ref is outdated and needs to be rebuilt. Generally, only very few update
- * functions change the node. Typically zero or one nodes change after an update. */
- tree_ref.reset();
+ LISTBASE_FOREACH (bNode *, node, &ntree.nodes) {
+ nodeDeclarationEnsure(&ntree, node);
+ if (this->should_update_individual_node(ntree, *node)) {
+ bNodeType &ntype = *node->typeinfo;
+ if (ntype.group_update_func) {
+ ntype.group_update_func(&ntree, node);
+ }
+ if (ntype.updatefunc) {
+ ntype.updatefunc(&ntree, node);
}
- ntree.runtime->changed_flag |= old_changed_flag;
}
}
}
- bool should_update_individual_node(const NodeRef &node)
+ bool should_update_individual_node(const bNodeTree &ntree, const bNode &node)
{
- bNodeTree &ntree = *node.btree();
- bNode &bnode = *node.bnode();
if (ntree.runtime->changed_flag & NTREE_CHANGED_ANY) {
return true;
}
- if (bnode.runtime->changed_flag & NTREE_CHANGED_NODE_PROPERTY) {
+ if (node.runtime->changed_flag & NTREE_CHANGED_NODE_PROPERTY) {
return true;
}
if (ntree.runtime->changed_flag & NTREE_CHANGED_LINK) {
+ ntree.ensure_topology_cache();
/* Node groups currently always rebuild their sockets when they are updated.
* So avoid calling the update method when no new link was added to it. */
- if (node.is_group_input_node()) {
- if (node.outputs().last()->is_directly_linked()) {
+ if (node.type == NODE_GROUP_INPUT) {
+ if (node.output_sockets().last()->is_directly_linked()) {
return true;
}
}
- else if (node.is_group_output_node()) {
- if (node.inputs().last()->is_directly_linked()) {
+ else if (node.type == NODE_GROUP_OUTPUT) {
+ if (node.input_sockets().last()->is_directly_linked()) {
return true;
}
}
@@ -1104,95 +1089,76 @@ class NodeTreeMainUpdater {
}
}
if (ntree.runtime->changed_flag & NTREE_CHANGED_INTERFACE) {
- if (node.is_group_input_node() || node.is_group_output_node()) {
+ if (ELEM(node.type, NODE_GROUP_INPUT, NODE_GROUP_OUTPUT)) {
return true;
}
}
return false;
}
- void update_individual_node(const NodeRef &node)
- {
- bNodeTree &ntree = *node.btree();
- bNode &bnode = *node.bnode();
- bNodeType &ntype = *bnode.typeinfo;
- if (ntype.group_update_func) {
- ntype.group_update_func(&ntree, &bnode);
- }
- if (ntype.updatefunc) {
- ntype.updatefunc(&ntree, &bnode);
- }
- }
-
- void update_internal_links(bNodeTree &ntree, std::unique_ptr<NodeTreeRef> &tree_ref)
+ void update_internal_links(bNodeTree &ntree)
{
- bool any_internal_links_updated = false;
- this->ensure_tree_ref(ntree, tree_ref);
- for (const NodeRef *node : tree_ref->nodes()) {
- if (!this->should_update_individual_node(*node)) {
+ bke::node_tree_runtime::AllowUsingOutdatedInfo allow_outdated_info{ntree};
+ ntree.ensure_topology_cache();
+ for (bNode *node : ntree.all_nodes()) {
+ if (!this->should_update_individual_node(ntree, *node)) {
continue;
}
/* Find all expected internal links. */
Vector<std::pair<bNodeSocket *, bNodeSocket *>> expected_internal_links;
- for (const OutputSocketRef *output_socket : node->outputs()) {
+ for (const bNodeSocket *output_socket : node->output_sockets()) {
if (!output_socket->is_available()) {
continue;
}
if (!output_socket->is_directly_linked()) {
continue;
}
- if (output_socket->bsocket()->flag & SOCK_NO_INTERNAL_LINK) {
+ if (output_socket->flag & SOCK_NO_INTERNAL_LINK) {
continue;
}
- const InputSocketRef *input_socket = this->find_internally_linked_input(output_socket);
+ const bNodeSocket *input_socket = this->find_internally_linked_input(output_socket);
if (input_socket != nullptr) {
- expected_internal_links.append({input_socket->bsocket(), output_socket->bsocket()});
+ expected_internal_links.append(
+ {const_cast<bNodeSocket *>(input_socket), const_cast<bNodeSocket *>(output_socket)});
}
}
- /* rebuilt internal links if they have changed. */
- if (node->internal_links().size() != expected_internal_links.size()) {
- this->update_internal_links_in_node(ntree, *node->bnode(), expected_internal_links);
- any_internal_links_updated = true;
+ /* Rebuilt internal links if they have changed. */
+ if (node->internal_links_span().size() != expected_internal_links.size()) {
+ this->update_internal_links_in_node(ntree, *node, expected_internal_links);
}
else {
for (auto &item : expected_internal_links) {
const bNodeSocket *from_socket = item.first;
const bNodeSocket *to_socket = item.second;
bool found = false;
- for (const InternalLinkRef *internal_link : node->internal_links()) {
- if (from_socket == internal_link->from().bsocket() &&
- to_socket == internal_link->to().bsocket()) {
+ for (const bNodeLink *internal_link : node->internal_links_span()) {
+ if (from_socket == internal_link->fromsock && to_socket == internal_link->tosock) {
found = true;
}
}
if (!found) {
- this->update_internal_links_in_node(ntree, *node->bnode(), expected_internal_links);
- any_internal_links_updated = true;
+ this->update_internal_links_in_node(ntree, *node, expected_internal_links);
break;
}
}
}
}
-
- if (any_internal_links_updated) {
- tree_ref.reset();
- }
}
- const InputSocketRef *find_internally_linked_input(const OutputSocketRef *output_socket)
+ const bNodeSocket *find_internally_linked_input(const bNodeSocket *output_socket)
{
- const InputSocketRef *selected_socket = nullptr;
+ const bNodeSocket *selected_socket = nullptr;
int selected_priority = -1;
bool selected_is_linked = false;
- for (const InputSocketRef *input_socket : output_socket->node().inputs()) {
+ for (const bNodeSocket *input_socket : output_socket->owner_node().input_sockets()) {
if (!input_socket->is_available()) {
continue;
}
- if (input_socket->bsocket()->flag & SOCK_NO_INTERNAL_LINK) {
+ if (input_socket->flag & SOCK_NO_INTERNAL_LINK) {
continue;
}
- const int priority = get_internal_link_type_priority(input_socket->bsocket()->typeinfo,
- output_socket->bsocket()->typeinfo);
+ const int priority = get_internal_link_type_priority(input_socket->typeinfo,
+ output_socket->typeinfo);
if (priority < 0) {
continue;
}
@@ -1227,23 +1193,12 @@ class NodeTreeMainUpdater {
BKE_ntree_update_tag_node_internal_link(&ntree, &node);
}
- void update_generic_callback(bNodeTree &ntree, std::unique_ptr<NodeTreeRef> &tree_ref)
+ void update_generic_callback(bNodeTree &ntree)
{
if (ntree.typeinfo->update == nullptr) {
return;
}
-
- /* Reset the changed_flag to allow detecting when the update callback changed the node tree. */
- const uint32_t old_changed_flag = ntree.runtime->changed_flag;
- ntree.runtime->changed_flag = NTREE_CHANGED_NOTHING;
-
ntree.typeinfo->update(&ntree);
-
- if (ntree.runtime->changed_flag != NTREE_CHANGED_NOTHING) {
- /* The tree ref is outdated and needs to be rebuilt. */
- tree_ref.reset();
- }
- ntree.runtime->changed_flag |= old_changed_flag;
}
void remove_unused_previews_when_necessary(bNodeTree &ntree)
@@ -1258,25 +1213,26 @@ class NodeTreeMainUpdater {
BKE_node_preview_remove_unused(&ntree);
}
- void propagate_runtime_flags(const NodeTreeRef &tree_ref)
+ void propagate_runtime_flags(const bNodeTree &ntree)
{
- bNodeTree &ntree = *tree_ref.btree();
+ ntree.ensure_topology_cache();
+
ntree.runtime->runtime_flag = 0;
if (ntree.type != NTREE_SHADER) {
return;
}
/* Check if a used node group has an animated image. */
- for (const NodeRef *group_node : tree_ref.nodes_by_type("NodeGroup")) {
- const bNodeTree *group = reinterpret_cast<bNodeTree *>(group_node->bnode()->id);
+ for (const bNode *group_node : ntree.nodes_by_type("ShaderNodeGroup")) {
+ const bNodeTree *group = reinterpret_cast<bNodeTree *>(group_node->id);
if (group != nullptr) {
ntree.runtime->runtime_flag |= group->runtime->runtime_flag;
}
}
/* Check if the tree itself has an animated image. */
for (const StringRefNull idname : {"ShaderNodeTexImage", "ShaderNodeTexEnvironment"}) {
- for (const NodeRef *node : tree_ref.nodes_by_type(idname)) {
- Image *image = reinterpret_cast<Image *>(node->bnode()->id);
+ for (const bNode *node : ntree.nodes_by_type(idname)) {
+ Image *image = reinterpret_cast<Image *>(node->id);
if (image != nullptr && BKE_image_is_animated(image)) {
ntree.runtime->runtime_flag |= NTREE_RUNTIME_FLAG_HAS_IMAGE_ANIMATION;
break;
@@ -1288,7 +1244,7 @@ class NodeTreeMainUpdater {
"ShaderNodeOutputLight",
"ShaderNodeOutputWorld",
"ShaderNodeOutputAOV"}) {
- const Span<const NodeRef *> nodes = tree_ref.nodes_by_type(idname);
+ const Span<const bNode *> nodes = ntree.nodes_by_type(idname);
if (!nodes.is_empty()) {
ntree.runtime->runtime_flag |= NTREE_RUNTIME_FLAG_HAS_MATERIAL_OUTPUT;
break;
@@ -1319,20 +1275,20 @@ class NodeTreeMainUpdater {
}
}
- bool check_if_output_changed(const NodeTreeRef &tree)
+ bool check_if_output_changed(const bNodeTree &tree)
{
- bNodeTree &btree = *tree.btree();
+ tree.ensure_topology_cache();
/* Compute a hash that represents the node topology connected to the output. This always has to
* be updated even if it is not used to detect changes right now. Otherwise
* #btree.runtime.output_topology_hash will go out of date. */
- const Vector<const SocketRef *> tree_output_sockets = this->find_output_sockets(tree);
- const uint32_t old_topology_hash = btree.runtime->output_topology_hash;
+ const Vector<const bNodeSocket *> tree_output_sockets = this->find_output_sockets(tree);
+ const uint32_t old_topology_hash = tree.runtime->output_topology_hash;
const uint32_t new_topology_hash = this->get_combined_socket_topology_hash(
tree, tree_output_sockets);
- btree.runtime->output_topology_hash = new_topology_hash;
+ tree.runtime->output_topology_hash = new_topology_hash;
- if (const AnimData *adt = BKE_animdata_from_id(&btree.id)) {
+ if (const AnimData *adt = BKE_animdata_from_id(&tree.id)) {
/* Drivers may copy values in the node tree around arbitrarily and may cause the output to
* change even if it wouldn't without drivers. Only some special drivers like `frame/5` can
* be used without causing updates all the time currently. In the future we could try to
@@ -1354,7 +1310,7 @@ class NodeTreeMainUpdater {
}
}
- if (btree.runtime->changed_flag & NTREE_CHANGED_ANY) {
+ if (tree.runtime->changed_flag & NTREE_CHANGED_ANY) {
return true;
}
@@ -1363,8 +1319,8 @@ class NodeTreeMainUpdater {
}
/* The topology hash can only be used when only topology-changing operations have been done. */
- if (btree.runtime->changed_flag ==
- (btree.runtime->changed_flag & (NTREE_CHANGED_LINK | NTREE_CHANGED_REMOVED_NODE))) {
+ if (tree.runtime->changed_flag ==
+ (tree.runtime->changed_flag & (NTREE_CHANGED_LINK | NTREE_CHANGED_REMOVED_NODE))) {
if (old_topology_hash == new_topology_hash) {
return false;
}
@@ -1377,15 +1333,15 @@ class NodeTreeMainUpdater {
return true;
}
- Vector<const SocketRef *> find_output_sockets(const NodeTreeRef &tree)
+ Vector<const bNodeSocket *> find_output_sockets(const bNodeTree &tree)
{
- Vector<const SocketRef *> sockets;
- for (const NodeRef *node : tree.nodes()) {
+ Vector<const bNodeSocket *> sockets;
+ for (const bNode *node : tree.all_nodes()) {
if (!this->is_output_node(*node)) {
continue;
}
- for (const InputSocketRef *socket : node->inputs()) {
- if (socket->idname() != "NodeSocketVirtual") {
+ for (const bNodeSocket *socket : node->input_sockets()) {
+ if (!STREQ(socket->idname, "NodeSocketVirtual")) {
sockets.append(socket);
}
}
@@ -1393,18 +1349,17 @@ class NodeTreeMainUpdater {
return sockets;
}
- bool is_output_node(const NodeRef &node) const
+ bool is_output_node(const bNode &node) const
{
- const bNode &bnode = *node.bnode();
- if (bnode.typeinfo->nclass == NODE_CLASS_OUTPUT) {
+ if (node.typeinfo->nclass == NODE_CLASS_OUTPUT) {
return true;
}
- if (bnode.type == NODE_GROUP_OUTPUT) {
+ if (node.type == NODE_GROUP_OUTPUT) {
return true;
}
/* Assume node groups without output sockets are outputs. */
- if (bnode.type == NODE_GROUP) {
- const bNodeTree *node_group = reinterpret_cast<const bNodeTree *>(bnode.id);
+ if (node.type == NODE_GROUP) {
+ const bNodeTree *node_group = reinterpret_cast<const bNodeTree *>(node.id);
if (node_group != nullptr &&
node_group->runtime->runtime_flag & NTREE_RUNTIME_FLAG_HAS_MATERIAL_OUTPUT) {
return true;
@@ -1417,10 +1372,10 @@ class NodeTreeMainUpdater {
* Computes a hash that changes when the node tree topology connected to an output node changes.
* Adding reroutes does not have an effect on the hash.
*/
- uint32_t get_combined_socket_topology_hash(const NodeTreeRef &tree,
- Span<const SocketRef *> sockets)
+ uint32_t get_combined_socket_topology_hash(const bNodeTree &tree,
+ Span<const bNodeSocket *> sockets)
{
- if (tree.has_link_cycles()) {
+ if (tree.has_link_cycle()) {
/* Return dummy value when the tree has any link cycles. The algorithm below could be improved to
* handle cycles more gracefully. */
return 0;
@@ -1433,29 +1388,28 @@ class NodeTreeMainUpdater {
return combined_hash;
}
- Array<uint32_t> get_socket_topology_hashes(const NodeTreeRef &tree,
- Span<const SocketRef *> sockets)
+ Array<uint32_t> get_socket_topology_hashes(const bNodeTree &tree,
+ Span<const bNodeSocket *> sockets)
{
- BLI_assert(!tree.has_link_cycles());
- Array<std::optional<uint32_t>> hash_by_socket_id(tree.sockets().size());
- Stack<const SocketRef *> sockets_to_check = sockets;
+ BLI_assert(!tree.has_link_cycle());
+ Array<std::optional<uint32_t>> hash_by_socket_id(tree.all_sockets().size());
+ Stack<const bNodeSocket *> sockets_to_check = sockets;
while (!sockets_to_check.is_empty()) {
- const SocketRef &in_out_socket = *sockets_to_check.peek();
- const NodeRef &node = in_out_socket.node();
+ const bNodeSocket &socket = *sockets_to_check.peek();
+ const bNode &node = socket.owner_node();
- if (hash_by_socket_id[in_out_socket.id()].has_value()) {
+ if (hash_by_socket_id[socket.index_in_tree()].has_value()) {
sockets_to_check.pop();
/* Socket is handled already. */
continue;
}
- if (in_out_socket.is_input()) {
+ if (socket.is_input()) {
/* For input sockets, first compute the hashes of all linked sockets. */
- const InputSocketRef &socket = in_out_socket.as_input();
bool all_origins_computed = true;
- for (const OutputSocketRef *origin_socket : socket.logically_linked_sockets()) {
- if (!hash_by_socket_id[origin_socket->id()].has_value()) {
+ for (const bNodeSocket *origin_socket : socket.logically_linked_sockets()) {
+ if (!hash_by_socket_id[origin_socket->index_in_tree()].has_value()) {
sockets_to_check.push(origin_socket);
all_origins_computed = false;
}
@@ -1465,22 +1419,21 @@ class NodeTreeMainUpdater {
}
/* When the hashes for the linked sockets are ready, combine them into a hash for the input
* socket. */
- const uint64_t socket_ptr = (uintptr_t)socket.bsocket();
+ const uint64_t socket_ptr = (uintptr_t)&socket;
uint32_t socket_hash = noise::hash(socket_ptr, socket_ptr >> 32);
- for (const OutputSocketRef *origin_socket : socket.logically_linked_sockets()) {
- const uint32_t origin_socket_hash = *hash_by_socket_id[origin_socket->id()];
+ for (const bNodeSocket *origin_socket : socket.logically_linked_sockets()) {
+ const uint32_t origin_socket_hash = *hash_by_socket_id[origin_socket->index_in_tree()];
socket_hash = noise::hash(socket_hash, origin_socket_hash);
}
- hash_by_socket_id[socket.id()] = socket_hash;
+ hash_by_socket_id[socket.index_in_tree()] = socket_hash;
sockets_to_check.pop();
}
else {
/* For output sockets, first compute the hashes of all available input sockets. */
- const OutputSocketRef &socket = in_out_socket.as_output();
bool all_available_inputs_computed = true;
- for (const InputSocketRef *input_socket : node.inputs()) {
+ for (const bNodeSocket *input_socket : node.input_sockets()) {
if (input_socket->is_available()) {
- if (!hash_by_socket_id[input_socket->id()].has_value()) {
+ if (!hash_by_socket_id[input_socket->index_in_tree()].has_value()) {
sockets_to_check.push(input_socket);
all_available_inputs_computed = false;
}
@@ -1491,25 +1444,25 @@ class NodeTreeMainUpdater {
}
/* When all input socket hashes have been computed, combine them into a hash for the output
* socket. */
- const uint64_t socket_ptr = (uintptr_t)socket.bsocket();
+ const uint64_t socket_ptr = (uintptr_t)&socket;
uint32_t socket_hash = noise::hash(socket_ptr, socket_ptr >> 32);
- for (const InputSocketRef *input_socket : node.inputs()) {
+ for (const bNodeSocket *input_socket : node.input_sockets()) {
if (input_socket->is_available()) {
- const uint32_t input_socket_hash = *hash_by_socket_id[input_socket->id()];
+ const uint32_t input_socket_hash = *hash_by_socket_id[input_socket->index_in_tree()];
socket_hash = noise::hash(socket_hash, input_socket_hash);
}
}
/* The Image Texture node has a special case. The behavior of the color output changes
* depending on whether the Alpha output is linked. */
- if (node.bnode()->type == SH_NODE_TEX_IMAGE && socket.index() == 0) {
- BLI_assert(socket.name() == "Color");
- const OutputSocketRef &alpha_socket = node.output(1);
- BLI_assert(alpha_socket.name() == "Alpha");
+ if (node.type == SH_NODE_TEX_IMAGE && socket.index() == 0) {
+ BLI_assert(STREQ(socket.name, "Color"));
+ const bNodeSocket &alpha_socket = node.output_socket(1);
+ BLI_assert(STREQ(alpha_socket.name, "Alpha"));
if (alpha_socket.is_directly_linked()) {
socket_hash = noise::hash(socket_hash);
}
}
- hash_by_socket_id[socket.id()] = socket_hash;
+ hash_by_socket_id[socket.index_in_tree()] = socket_hash;
sockets_to_check.pop();
}
}
@@ -1517,7 +1470,7 @@ class NodeTreeMainUpdater {
/* Create output array. */
Array<uint32_t> hashes(sockets.size());
for (const int i : sockets.index_range()) {
- hashes[i] = *hash_by_socket_id[sockets[i]->id()];
+ hashes[i] = *hash_by_socket_id[sockets[i]->index_in_tree()];
}
return hashes;
}
@@ -1526,37 +1479,34 @@ class NodeTreeMainUpdater {
* Returns true when any of the provided sockets changed its values. A change is detected by
* checking the #changed_flag on connected sockets and nodes.
*/
- bool check_if_socket_outputs_changed_based_on_flags(const NodeTreeRef &tree,
- Span<const SocketRef *> sockets)
+ bool check_if_socket_outputs_changed_based_on_flags(const bNodeTree &tree,
+ Span<const bNodeSocket *> sockets)
{
/* Avoid visiting the same socket twice when multiple links point to the same socket. */
- Array<bool> pushed_by_socket_id(tree.sockets().size(), false);
- Stack<const SocketRef *> sockets_to_check = sockets;
+ Array<bool> pushed_by_socket_id(tree.all_sockets().size(), false);
+ Stack<const bNodeSocket *> sockets_to_check = sockets;
- for (const SocketRef *socket : sockets) {
- pushed_by_socket_id[socket->id()] = true;
+ for (const bNodeSocket *socket : sockets) {
+ pushed_by_socket_id[socket->index_in_tree()] = true;
}
while (!sockets_to_check.is_empty()) {
- const SocketRef &in_out_socket = *sockets_to_check.pop();
- const NodeRef &node = in_out_socket.node();
- const bNode &bnode = *node.bnode();
- const bNodeSocket &bsocket = *in_out_socket.bsocket();
- if (bsocket.runtime->changed_flag != NTREE_CHANGED_NOTHING) {
+ const bNodeSocket &socket = *sockets_to_check.pop();
+ const bNode &node = socket.owner_node();
+ if (socket.runtime->changed_flag != NTREE_CHANGED_NOTHING) {
return true;
}
- if (bnode.runtime->changed_flag != NTREE_CHANGED_NOTHING) {
- const bool only_unused_internal_link_changed = (bnode.flag & NODE_MUTED) == 0 &&
- bnode.runtime->changed_flag ==
+ if (node.runtime->changed_flag != NTREE_CHANGED_NOTHING) {
+ const bool only_unused_internal_link_changed = !node.is_muted() &&
+ node.runtime->changed_flag ==
NTREE_CHANGED_INTERNAL_LINK;
if (!only_unused_internal_link_changed) {
return true;
}
}
- if (in_out_socket.is_input()) {
- const InputSocketRef &socket = in_out_socket.as_input();
- for (const OutputSocketRef *origin_socket : socket.logically_linked_sockets()) {
- bool &pushed = pushed_by_socket_id[origin_socket->id()];
+ if (socket.is_input()) {
+ for (const bNodeSocket *origin_socket : socket.logically_linked_sockets()) {
+ bool &pushed = pushed_by_socket_id[origin_socket->index_in_tree()];
if (!pushed) {
sockets_to_check.push(origin_socket);
pushed = true;
@@ -1564,10 +1514,9 @@ class NodeTreeMainUpdater {
}
}
else {
- const OutputSocketRef &socket = in_out_socket.as_output();
- for (const InputSocketRef *input_socket : node.inputs()) {
+ for (const bNodeSocket *input_socket : node.input_sockets()) {
if (input_socket->is_available()) {
- bool &pushed = pushed_by_socket_id[input_socket->id()];
+ bool &pushed = pushed_by_socket_id[input_socket->index_in_tree()];
if (!pushed) {
sockets_to_check.push(input_socket);
pushed = true;
@@ -1576,11 +1525,11 @@ class NodeTreeMainUpdater {
}
/* The Normal node has a special case, because the value stored in the first output socket
* is used as input in the node. */
- if (bnode.type == SH_NODE_NORMAL && socket.index() == 1) {
- BLI_assert(socket.name() == "Dot");
- const OutputSocketRef &normal_output = node.output(0);
- BLI_assert(normal_output.name() == "Normal");
- bool &pushed = pushed_by_socket_id[normal_output.id()];
+ if (node.type == SH_NODE_NORMAL && socket.index() == 1) {
+ BLI_assert(STREQ(socket.name, "Dot"));
+ const bNodeSocket &normal_output = node.output_socket(0);
+ BLI_assert(STREQ(normal_output.name, "Normal"));
+ bool &pushed = pushed_by_socket_id[normal_output.index_in_tree()];
if (!pushed) {
sockets_to_check.push(&normal_output);
pushed = true;
@@ -1654,6 +1603,11 @@ void BKE_ntree_update_tag_node_removed(bNodeTree *ntree)
add_tree_tag(ntree, NTREE_CHANGED_REMOVED_NODE);
}
+void BKE_ntree_update_tag_node_reordered(bNodeTree *ntree)
+{
+ add_tree_tag(ntree, NTREE_CHANGED_ANY);
+}
+
void BKE_ntree_update_tag_node_mute(bNodeTree *ntree, bNode *node)
{
add_node_tag(ntree, node, NTREE_CHANGED_NODE_PROPERTY);