diff options
author | Jacques Lucke <jacques@blender.org> | 2022-05-04 16:02:19 +0300 |
---|---|---|
committer | Jacques Lucke <jacques@blender.org> | 2022-05-04 16:02:19 +0300 |
commit | 54b293237ea92f29aef05268836cab54c420d7ad (patch) | |
tree | 294f260359e46eb49041e5bff97bcd4e268a74eb /source/blender/nodes | |
parent | d86d7c935ef3d44890fa71256c9045688fb3044a (diff) |
Fix T97375: changing node tree from Python is very slow
The issue was that the `NodeTreeRef` acceleration data structure was
rebuilt much more often than necessary. That happened because the
Map Range node accidentally tagged the node tree for change even
though it did not actually change.
Differential Revision: https://developer.blender.org/D14842
Diffstat (limited to 'source/blender/nodes')
-rw-r--r-- | source/blender/nodes/shader/nodes/node_shader_map_range.cc | 26 |
1 files changed, 17 insertions, 9 deletions
diff --git a/source/blender/nodes/shader/nodes/node_shader_map_range.cc b/source/blender/nodes/shader/nodes/node_shader_map_range.cc index 9ba9a279c57..a487e07bd5a 100644 --- a/source/blender/nodes/shader/nodes/node_shader_map_range.cc +++ b/source/blender/nodes/shader/nodes/node_shader_map_range.cc @@ -66,24 +66,32 @@ static void node_shader_update_map_range(bNodeTree *ntree, bNode *node) const CustomDataType data_type = static_cast<CustomDataType>(storage.data_type); const int type = (data_type == CD_PROP_FLOAT) ? SOCK_FLOAT : SOCK_VECTOR; - LISTBASE_FOREACH (bNodeSocket *, socket, &node->inputs) { - nodeSetSocketAvailability(ntree, socket, socket->type == type); - } + Array<bool> new_input_availability(BLI_listbase_count(&node->inputs)); + Array<bool> new_output_availability(BLI_listbase_count(&node->outputs)); - LISTBASE_FOREACH (bNodeSocket *, socket, &node->outputs) { - nodeSetSocketAvailability(ntree, socket, socket->type == type); + int index; + LISTBASE_FOREACH_INDEX (bNodeSocket *, socket, &node->inputs, index) { + new_input_availability[index] = socket->type == type; + } + LISTBASE_FOREACH_INDEX (bNodeSocket *, socket, &node->outputs, index) { + new_output_availability[index] = socket->type == type; } if (storage.interpolation_type != NODE_MAP_RANGE_STEPPED) { if (type == SOCK_FLOAT) { - bNodeSocket *sockSteps = (bNodeSocket *)BLI_findlink(&node->inputs, 5); - nodeSetSocketAvailability(ntree, sockSteps, false); + new_input_availability[5] = false; } else { - bNodeSocket *sockSteps = (bNodeSocket *)BLI_findlink(&node->inputs, 11); - nodeSetSocketAvailability(ntree, sockSteps, false); + new_input_availability[11] = false; } } + + LISTBASE_FOREACH_INDEX (bNodeSocket *, socket, &node->inputs, index) { + nodeSetSocketAvailability(ntree, socket, new_input_availability[index]); + } + LISTBASE_FOREACH_INDEX (bNodeSocket *, socket, &node->outputs, index) { + nodeSetSocketAvailability(ntree, socket, new_output_availability[index]); + } } static void 
node_shader_init_map_range(bNodeTree *UNUSED(ntree), bNode *node) |