griptape-nodes 0.64.11__py3-none-any.whl → 0.65.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. griptape_nodes/app/app.py +25 -5
  2. griptape_nodes/cli/commands/init.py +65 -54
  3. griptape_nodes/cli/commands/libraries.py +92 -85
  4. griptape_nodes/cli/commands/self.py +121 -0
  5. griptape_nodes/common/node_executor.py +2142 -101
  6. griptape_nodes/exe_types/base_iterative_nodes.py +1004 -0
  7. griptape_nodes/exe_types/connections.py +114 -19
  8. griptape_nodes/exe_types/core_types.py +225 -7
  9. griptape_nodes/exe_types/flow.py +3 -3
  10. griptape_nodes/exe_types/node_types.py +681 -225
  11. griptape_nodes/exe_types/param_components/README.md +414 -0
  12. griptape_nodes/exe_types/param_components/api_key_provider_parameter.py +200 -0
  13. griptape_nodes/exe_types/param_components/huggingface/huggingface_model_parameter.py +2 -0
  14. griptape_nodes/exe_types/param_components/huggingface/huggingface_repo_file_parameter.py +79 -5
  15. griptape_nodes/exe_types/param_types/parameter_button.py +443 -0
  16. griptape_nodes/machines/control_flow.py +84 -38
  17. griptape_nodes/machines/dag_builder.py +148 -70
  18. griptape_nodes/machines/parallel_resolution.py +61 -35
  19. griptape_nodes/machines/sequential_resolution.py +11 -113
  20. griptape_nodes/retained_mode/events/app_events.py +1 -0
  21. griptape_nodes/retained_mode/events/base_events.py +16 -13
  22. griptape_nodes/retained_mode/events/connection_events.py +3 -0
  23. griptape_nodes/retained_mode/events/execution_events.py +35 -0
  24. griptape_nodes/retained_mode/events/flow_events.py +15 -2
  25. griptape_nodes/retained_mode/events/library_events.py +347 -0
  26. griptape_nodes/retained_mode/events/node_events.py +48 -0
  27. griptape_nodes/retained_mode/events/os_events.py +86 -3
  28. griptape_nodes/retained_mode/events/project_events.py +15 -1
  29. griptape_nodes/retained_mode/events/workflow_events.py +48 -1
  30. griptape_nodes/retained_mode/griptape_nodes.py +6 -2
  31. griptape_nodes/retained_mode/managers/config_manager.py +10 -8
  32. griptape_nodes/retained_mode/managers/event_manager.py +168 -0
  33. griptape_nodes/retained_mode/managers/fitness_problems/libraries/__init__.py +2 -0
  34. griptape_nodes/retained_mode/managers/fitness_problems/libraries/old_xdg_location_warning_problem.py +43 -0
  35. griptape_nodes/retained_mode/managers/flow_manager.py +664 -123
  36. griptape_nodes/retained_mode/managers/library_manager.py +1142 -138
  37. griptape_nodes/retained_mode/managers/model_manager.py +2 -3
  38. griptape_nodes/retained_mode/managers/node_manager.py +148 -25
  39. griptape_nodes/retained_mode/managers/object_manager.py +3 -1
  40. griptape_nodes/retained_mode/managers/operation_manager.py +3 -1
  41. griptape_nodes/retained_mode/managers/os_manager.py +1158 -122
  42. griptape_nodes/retained_mode/managers/secrets_manager.py +2 -3
  43. griptape_nodes/retained_mode/managers/settings.py +21 -1
  44. griptape_nodes/retained_mode/managers/sync_manager.py +2 -3
  45. griptape_nodes/retained_mode/managers/workflow_manager.py +358 -104
  46. griptape_nodes/retained_mode/retained_mode.py +3 -3
  47. griptape_nodes/traits/button.py +44 -2
  48. griptape_nodes/traits/file_system_picker.py +2 -2
  49. griptape_nodes/utils/file_utils.py +101 -0
  50. griptape_nodes/utils/git_utils.py +1236 -0
  51. griptape_nodes/utils/library_utils.py +122 -0
  52. {griptape_nodes-0.64.11.dist-info → griptape_nodes-0.65.1.dist-info}/METADATA +2 -1
  53. {griptape_nodes-0.64.11.dist-info → griptape_nodes-0.65.1.dist-info}/RECORD +55 -47
  54. {griptape_nodes-0.64.11.dist-info → griptape_nodes-0.65.1.dist-info}/WHEEL +1 -1
  55. {griptape_nodes-0.64.11.dist-info → griptape_nodes-0.65.1.dist-info}/entry_points.txt +0 -0
griptape_nodes/machines/control_flow.py

@@ -5,9 +5,8 @@ import logging
 from dataclasses import dataclass
 from typing import TYPE_CHECKING

-from griptape_nodes.exe_types.core_types import Parameter, ParameterTypeBuiltin
+from griptape_nodes.exe_types.base_iterative_nodes import BaseIterativeStartNode
 from griptape_nodes.exe_types.node_types import (
-    CONTROL_INPUT_PARAMETER,
     LOCAL_EXECUTION,
     BaseNode,
     NodeGroupNode,
@@ -28,6 +27,7 @@ from griptape_nodes.retained_mode.managers.node_manager import NodeManager
 from griptape_nodes.retained_mode.managers.settings import WorkflowExecutionMode

 if TYPE_CHECKING:
+    from griptape_nodes.exe_types.core_types import Parameter
     from griptape_nodes.exe_types.flow import ControlFlow


@@ -52,6 +52,7 @@ class ControlFlowContext:
     flow_name: str
     pickle_control_flow_result: bool
     end_node: BaseNode | None = None
+    is_isolated: bool

     def __init__(
         self,
@@ -60,12 +61,21 @@ class ControlFlowContext:
         *,
         execution_type: WorkflowExecutionMode = WorkflowExecutionMode.SEQUENTIAL,
         pickle_control_flow_result: bool = False,
+        is_isolated: bool = False,
     ) -> None:
         self.flow_name = flow_name
         if execution_type == WorkflowExecutionMode.PARALLEL:
             # Get the global DagBuilder from FlowManager
+            from griptape_nodes.machines.dag_builder import DagBuilder
+            from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
+
+            # Create isolated DagBuilder for independent subflows
+            if is_isolated:
+                dag_builder = DagBuilder()
+                logger.debug("Created isolated DagBuilder for flow '%s'", flow_name)
+            else:
+                dag_builder = GriptapeNodes.FlowManager().global_dag_builder

-            dag_builder = GriptapeNodes.FlowManager().global_dag_builder
             self.resolution_machine = ParallelResolutionMachine(
                 flow_name, max_nodes_in_parallel, dag_builder=dag_builder
             )
@@ -73,6 +83,7 @@ class ControlFlowContext:
             self.resolution_machine = SequentialResolutionMachine()
         self.current_nodes = []
         self.pickle_control_flow_result = pickle_control_flow_result
+        self.is_isolated = is_isolated

     def get_next_nodes(self, output_parameter: Parameter | None = None) -> list[NextNodeInfo]:
         """Get all next nodes from the current nodes.
@@ -85,24 +96,27 @@
             if output_parameter is not None:
                 # Get connected node from control flow
                 node_connection = (
-                    GriptapeNodes.FlowManager().get_connections().get_connected_node(current_node, output_parameter)
+                    GriptapeNodes.FlowManager()
+                    .get_connections()
+                    .get_connected_node(current_node, output_parameter, include_internal=False)
                 )
                 if node_connection is not None:
                     node, entry_parameter = node_connection
                     next_nodes.append(NextNodeInfo(node=node, entry_parameter=entry_parameter))
+            # Get next control output for this node
             else:
-                # Get next control output for this node
-
-                if (
-                    isinstance(current_node, NodeGroupNode)
-                    and current_node.get_parameter_value(current_node.execution_environment.name) != LOCAL_EXECUTION
-                ):
-                    next_output = self.get_next_control_output_for_non_local_execution(current_node)
-                else:
-                    next_output = current_node.get_next_control_output()
+                next_output = current_node.get_next_control_output()
                 if next_output is not None:
+                    if isinstance(current_node, BaseIterativeStartNode):
+                        if current_node.end_node is None:
+                            msg = "Iterative start node has no end node"
+                            raise ValueError(msg)
+                        next_nodes.append(NextNodeInfo(node=current_node.end_node, entry_parameter=None))
+                        continue
                     node_connection = (
-                        GriptapeNodes.FlowManager().get_connections().get_connected_node(current_node, next_output)
+                        GriptapeNodes.FlowManager()
+                        .get_connections()
+                        .get_connected_node(current_node, next_output, include_internal=False)
                     )
                     if node_connection is not None:
                         node, entry_parameter = node_connection
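Note on the hunk above: when the current node is a BaseIterativeStartNode, control now jumps straight to its paired end_node instead of following an outgoing control connection. A minimal standalone sketch of that pairing, using toy classes rather than the griptape_nodes API:

from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class ToyNode:
    name: str
    next_nodes: list[ToyNode] = field(default_factory=list)


@dataclass
class ToyIterativeStart(ToyNode):
    end_node: ToyNode | None = None


def next_node_for(node: ToyNode) -> ToyNode | None:
    # Iterative start nodes route control directly to their paired end node,
    # mirroring the BaseIterativeStartNode branch in the hunk above.
    if isinstance(node, ToyIterativeStart):
        if node.end_node is None:
            msg = "Iterative start node has no end node"
            raise ValueError(msg)
        return node.end_node
    return node.next_nodes[0] if node.next_nodes else None


end = ToyNode("for_each_end")
start = ToyIterativeStart("for_each_start", end_node=end)
assert next_node_for(start) is end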
@@ -111,27 +125,13 @@
                 logger.debug("Control Flow: Node '%s' has no control output", current_node.name)

         # If no connections found, check execution queue
-        if not next_nodes:
+        if not next_nodes and not self.is_isolated:
             node = GriptapeNodes.FlowManager().get_next_node_from_execution_queue()
             if node is not None:
                 next_nodes.append(NextNodeInfo(node=node, entry_parameter=None))

         return next_nodes

-    # Mirrored in @parallel_resolution.py. if you update one, update the other.
-    def get_next_control_output_for_non_local_execution(self, node: BaseNode) -> Parameter | None:
-        for param_name, value in node.parameter_output_values.items():
-            parameter = node.get_parameter_by_name(param_name)
-            if (
-                parameter is not None
-                and parameter.type == ParameterTypeBuiltin.CONTROL_TYPE
-                and value == CONTROL_INPUT_PARAMETER
-            ):
-                # This is the parameter
-                logger.debug("Control Flow: Found control output parameter '%s' for non-local execution", param_name)
-                return parameter
-        return None
-
     def reset(self, *, cancel: bool = False) -> None:
         if self.current_nodes is not None:
             for node in self.current_nodes:
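Together with the constructor change earlier in this file, the `not self.is_isolated` guard means an isolated subflow never pulls work from the global execution queue and builds into its own DagBuilder. A minimal standalone sketch of the builder selection, with a stand-in class and a module-level global playing the role of FlowManager().global_dag_builder (none of this is griptape_nodes API):

class StubDagBuilder:
    """Stand-in for DagBuilder; only object identity matters here."""

    def __init__(self) -> None:
        self.graphs: dict[str, set[str]] = {}


GLOBAL_DAG_BUILDER = StubDagBuilder()  # plays the role of the shared global builder


def pick_dag_builder(*, is_isolated: bool) -> StubDagBuilder:
    # Isolated subflows get a fresh builder so their graphs never mix with the
    # main flow's; everything else shares the global builder.
    return StubDagBuilder() if is_isolated else GLOBAL_DAG_BUILDER


assert pick_dag_builder(is_isolated=False) is GLOBAL_DAG_BUILDER
assert pick_dag_builder(is_isolated=True) is not GLOBAL_DAG_BUILDER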
@@ -317,7 +317,13 @@ class CompleteState(State):

 # MACHINE TIME!!!
 class ControlFlowMachine(FSM[ControlFlowContext]):
-    def __init__(self, flow_name: str, *, pickle_control_flow_result: bool = False) -> None:
+    def __init__(
+        self,
+        flow_name: str,
+        *,
+        pickle_control_flow_result: bool = False,
+        is_isolated: bool = False,
+    ) -> None:
         execution_type = GriptapeNodes.ConfigManager().get_config_value(
             "workflow_execution_mode", default=WorkflowExecutionMode.SEQUENTIAL
         )
@@ -327,6 +333,7 @@ class ControlFlowMachine(FSM[ControlFlowContext]):
             max_nodes_in_parallel,
             execution_type=execution_type,
             pickle_control_flow_result=pickle_control_flow_result,
+            is_isolated=is_isolated,
         )
         super().__init__(context)

@@ -404,48 +411,87 @@ class ControlFlowMachine(FSM[ControlFlowContext]):
         ):
             await self.update()

-    async def _process_nodes_for_dag(self, start_node: BaseNode) -> list[BaseNode]:
+    async def _process_nodes_for_dag(self, start_node: BaseNode) -> list[BaseNode]:  # noqa: C901, PLR0912
         """Process data_nodes from the global queue to build unified DAG.

         This method identifies data_nodes in the execution queue and processes
         their dependencies into the DAG resolution machine.
+
+        For isolated subflows, this skips the global queue entirely and just
+        processes the start node, as subflows are self-contained.
         """
         if not isinstance(self._context.resolution_machine, ParallelResolutionMachine):
             return []
-        # Get the global flow queue
-        flow_manager = GriptapeNodes.FlowManager()
-        dag_builder = flow_manager.global_dag_builder
+
+        # Use the DagBuilder from the resolution machine context (may be isolated or global)
+        dag_builder = self._context.resolution_machine.context.dag_builder
         if dag_builder is None:
             msg = "DAG builder is not initialized."
             raise ValueError(msg)

         # Build with the first node (it should already be the proxy if it's part of a group)
         dag_builder.add_node_with_dependencies(start_node, start_node.name)
-        queue_items = list(flow_manager.global_flow_queue.queue)
+
+        # Check if we're using an isolated DagBuilder (for subflows)
+        flow_manager = GriptapeNodes.FlowManager()
+        node_manager = GriptapeNodes.NodeManager()
+        is_isolated = dag_builder is not flow_manager.global_dag_builder
+
+        if is_isolated:
+            # For isolated subflows, we don't process the global queue.
+            # Just return the start node - the subflow is self-contained.
+            logger.debug(
+                "Using isolated DagBuilder for flow '%s' - skipping global queue processing", self._context.flow_name
+            )
+            return [start_node]
+
+        # For main flows using the global DagBuilder, process the global queue
         start_nodes = [start_node]
         from griptape_nodes.retained_mode.managers.flow_manager import DagExecutionType

-        # Find data_nodes and remove them from queue
+        # PASS 1: Process all control/start nodes first to build control flow graphs
+        queue_items = list(flow_manager.global_flow_queue.queue)
         for item in queue_items:
             if item.dag_execution_type in (DagExecutionType.CONTROL_NODE, DagExecutionType.START_NODE):
                 node = item.node
                 node.state = NodeResolutionState.UNRESOLVED
                 # Use proxy node if this node is part of a group, otherwise use original node
                 node_to_add = node
+
                 # Only add if not already added (proxy might already be in DAG)
                 if node_to_add.name not in dag_builder.node_to_reference:
                     dag_builder.add_node_with_dependencies(node_to_add, node_to_add.name)
                     if node_to_add not in start_nodes:
                         start_nodes.append(node_to_add)
                 flow_manager.global_flow_queue.queue.remove(item)
-            elif item.dag_execution_type == DagExecutionType.DATA_NODE:
+
+        # PASS 2: Process all data nodes after control graphs are built
+        queue_items = list(flow_manager.global_flow_queue.queue)
+        for item in queue_items:
+            if item.dag_execution_type == DagExecutionType.DATA_NODE:
                 node = item.node
                 node.state = NodeResolutionState.UNRESOLVED
                 # Use proxy node if this node is part of a group, otherwise use original node
                 node_to_add = node
+                disconnected = True
                 # Only add if not already added (proxy might already be in DAG)
                 if node_to_add.name not in dag_builder.node_to_reference:
-                    dag_builder.add_node_with_dependencies(node_to_add, node_to_add.name)
+                    # The DAG entry must be created now, but it can't be queued or used until its dependencies have been resolved.
+                    # Figure out which graph the data node belongs to, if it belongs to one.
+                    for graph_start_node_name in dag_builder.graphs:
+                        graph_start_node = node_manager.get_node_by_name(graph_start_node_name)
+                        correct_graph = flow_manager.is_node_connected(graph_start_node, node)
+                        # The node is in the downstream connections of this graph.
+                        if correct_graph:
+                            # The node is connected to a graph.
+                            disconnected = False
+                            if node.name not in dag_builder.start_node_candidates:
+                                dag_builder.start_node_candidates[node.name] = set()
+                            dag_builder.start_node_candidates[node.name].add(graph_start_node_name)
+                    if disconnected:
+                        # If the node is not connected to any graph, we can add it as its own graph here.
+                        # It will not cause any overlapping confusion with existing graphs.
+                        dag_builder.add_node_with_dependencies(node_to_add, node_to_add.name)
                 flow_manager.global_flow_queue.queue.remove(item)

         return start_nodes
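The rewritten method above makes two passes over the global queue: control and start nodes are added first so the control-flow graphs exist, then each data node is either recorded in start_node_candidates under the graph(s) it is downstream of, or added as its own graph when it is disconnected. A standalone sketch of that classification, with connectivity faked by an explicit mapping (none of these names are griptape_nodes APIs):

from collections import deque

# Queue entries are (node_name, dag_execution_type); "feeds" fakes is_node_connected().
queue = deque(
    [
        ("StartFlow", "CONTROL_NODE"),
        ("LoadImage", "DATA_NODE"),       # downstream of the "StartFlow" graph
        ("StandaloneMath", "DATA_NODE"),  # not connected to any control graph
    ]
)
feeds = {"LoadImage": {"StartFlow"}}

graphs: dict[str, set[str]] = {}
start_node_candidates: dict[str, set[str]] = {}

# PASS 1: control/start nodes each seed a control-flow graph.
for name, kind in list(queue):
    if kind == "CONTROL_NODE":
        graphs[name] = {name}
        queue.remove((name, kind))

# PASS 2: data nodes are deferred as candidates of the graphs they feed,
# or added as their own graph when disconnected.
for name, kind in list(queue):
    if kind == "DATA_NODE":
        owning_graphs = feeds.get(name, set())
        if owning_graphs:
            start_node_candidates.setdefault(name, set()).update(owning_graphs)
        else:
            graphs[name] = {name}
        queue.remove((name, kind))

assert start_node_candidates == {"LoadImage": {"StartFlow"}}
assert set(graphs) == {"StartFlow", "StandaloneMath"}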
griptape_nodes/machines/dag_builder.py

@@ -6,8 +6,9 @@ from enum import StrEnum
 from typing import TYPE_CHECKING

 from griptape_nodes.common.directed_graph import DirectedGraph
+from griptape_nodes.exe_types.base_iterative_nodes import BaseIterativeStartNode
 from griptape_nodes.exe_types.core_types import ParameterTypeBuiltin
-from griptape_nodes.exe_types.node_types import LOCAL_EXECUTION, NodeGroupNode, NodeResolutionState
+from griptape_nodes.exe_types.node_types import NodeResolutionState

 if TYPE_CHECKING:
     import asyncio
@@ -44,58 +45,16 @@ class DagBuilder:
     graphs: dict[str, DirectedGraph]  # Str is the name of the start node associated here.
     node_to_reference: dict[str, DagNode]
     graph_to_nodes: dict[str, set[str]]  # Track which nodes belong to which graph
+    start_node_candidates: dict[str, set[str]]

     def __init__(self) -> None:
         self.graphs = {}
         self.node_to_reference: dict[str, DagNode] = {}
         self.graph_to_nodes = {}
-
-    @staticmethod
-    def _should_use_parent_group(node: BaseNode) -> bool:
-        """Check if a node's parent group should be used for DAG edges.
-
-        Returns True if the node has a parent NodeGroupNode that is NOT in LOCAL_EXECUTION mode.
-        In LOCAL_EXECUTION mode, groups are transparent and children are treated as separate nodes.
-        """
-        if not isinstance(node.parent_group, NodeGroupNode):
-            return False
-        parent_execution_env = node.parent_group.get_parameter_value(node.parent_group.execution_environment.name)
-        return parent_execution_env != LOCAL_EXECUTION
-
-    def _get_node_for_dag_edge(self, node: BaseNode, graph: DirectedGraph, graph_name: str) -> BaseNode:
-        """Get the node to use for DAG edges - either the node itself or its parent group.
-
-        Args:
-            node: The original node
-            graph: The graph being built
-            graph_name: Name of the graph for tracking
-
-        Returns:
-            The node or parent group to use in DAG edges
-        """
-        if self._should_use_parent_group(node):
-            parent_group = node.parent_group
-            if isinstance(parent_group, NodeGroupNode):
-                self._ensure_group_node_in_dag(parent_group, graph, graph_name)
-            return parent_group
-        return node
-
-    def _ensure_group_node_in_dag(self, group_node: NodeGroupNode, graph: DirectedGraph, graph_name: str) -> None:
-        """Ensure a NodeGroupNode is added to the DAG if not already present.
-
-        Args:
-            group_node: The NodeGroupNode to add
-            graph: The graph to add it to
-            graph_name: Name of the graph for tracking
-        """
-        if group_node.name not in self.node_to_reference:
-            dag_node = DagNode(node_reference=group_node, node_state=NodeState.WAITING)
-            self.node_to_reference[group_node.name] = dag_node
-            graph.add_node(node_for_adding=group_node.name)
-            self.graph_to_nodes[graph_name].add(group_node.name)
+        self.start_node_candidates = {}

     # Complex with the inner recursive method, but it needs connections and added_nodes.
-    def add_node_with_dependencies(self, node: BaseNode, graph_name: str = "default") -> list[BaseNode]:  # noqa: C901
+    def add_node_with_dependencies(self, node: BaseNode, graph_name: str = "default") -> list[BaseNode]:
         """Add node and all its dependencies to DAG. Returns list of added nodes."""
         from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes

@@ -115,6 +74,14 @@ class DagBuilder:

             if current_node.name in self.node_to_reference:
                 return
+            # Add current node to tracking
+            dag_node = DagNode(node_reference=current_node, node_state=NodeState.WAITING)
+            self.node_to_reference[current_node.name] = dag_node
+            added_nodes.append(current_node)
+
+            # Add to graph
+            graph.add_node(node_for_adding=current_node.name)
+            self.graph_to_nodes[graph_name].add(current_node.name)

             # Check if we should ignore dependencies (for special nodes like output_selector)
             ignore_data_dependencies = hasattr(current_node, "ignore_dependencies")
@@ -129,7 +96,7 @@ class DagBuilder:
                 if ignore_data_dependencies:
                     continue

-                upstream_connection = connections.get_connected_node(current_node, param)
+                upstream_connection = connections.get_connected_node(current_node, param, include_internal=False)
                 if not upstream_connection:
                     continue

@@ -139,30 +106,11 @@ class DagBuilder:
                 if upstream_node.state == NodeResolutionState.RESOLVED:
                     continue

-                # Check for internal group connections - traverse but don't add edge
-                is_internal_connection = (
-                    self._should_use_parent_group(current_node)
-                    and upstream_node.parent_group == current_node.parent_group
-                )
-
                 # Recursively add upstream node
                 _add_node_recursive(upstream_node, visited, graph)

-                # Add edge unless it's an internal group connection
-                if not is_internal_connection:
-                    upstream_for_edge = self._get_node_for_dag_edge(upstream_node, graph, graph_name)
-                    current_for_edge = self._get_node_for_dag_edge(current_node, graph, graph_name)
-                    graph.add_edge(upstream_for_edge.name, current_for_edge.name)
-
-            # Always add current node to tracking (even if parent group is used for edges)
-            dag_node = DagNode(node_reference=current_node, node_state=NodeState.WAITING)
-            self.node_to_reference[current_node.name] = dag_node
-            added_nodes.append(current_node)
-
-            # Add to graph if not using parent group
-            if not self._should_use_parent_group(current_node):
-                graph.add_node(node_for_adding=current_node.name)
-                self.graph_to_nodes[graph_name].add(current_node.name)
+                # Add edge from upstream to current
+                graph.add_edge(upstream_node.name, current_node.name)

         _add_node_recursive(node, set(), graph)

@@ -193,6 +141,7 @@ class DagBuilder:
         self.graphs.clear()
         self.node_to_reference.clear()
         self.graph_to_nodes.clear()
+        self.start_node_candidates.clear()

     def can_queue_control_node(self, node: DagNode) -> bool:
         if len(self.graphs) == 1:
@@ -203,7 +152,8 @@ class DagBuilder:
         connections = GriptapeNodes.FlowManager().get_connections()

         control_connections = self.get_number_incoming_control_connections(node.node_reference, connections)
-        if control_connections <= 1:
+        # If there are no incoming control connections, the node can be queued immediately.
+        if control_connections == 0:
             return True

         for graph in self.graphs.values():
@@ -239,10 +189,119 @@ class DagBuilder:
                 # Find the parameter to check if it's a control type
                 param = node.get_parameter_by_name(param_name)
                 if param and ParameterTypeBuiltin.CONTROL_TYPE.value in param.input_types:
-                    control_connection_count += len(connection_ids)
+                    # Skip connections from end node or itself if this is a BaseIterativeStartNode
+                    if isinstance(node, BaseIterativeStartNode):
+                        for connection_id in connection_ids:
+                            if connection_id in connections.connections:
+                                connection = connections.connections[connection_id]
+                                source_node = connection.source_node
+                                # Skip if connection is from end node or itself
+                                if source_node in (node.end_node, node):
+                                    continue
+                                control_connection_count += 1
+                    else:
+                        control_connection_count += len(connection_ids)

         return control_connection_count

+    @staticmethod
+    def collect_nodes_in_forward_control_path(
+        start_node: BaseNode, end_node: BaseNode, connections: Connections
+    ) -> set[str]:
+        """Collect all nodes in the forward control path from start_node to end_node.
+
+        Args:
+            start_node: The node to start traversal from
+            end_node: The node to stop traversal at (inclusive)
+            connections: The connections manager
+
+        Returns:
+            Set of node names in the control path from start to end (inclusive)
+        """
+        nodes_in_path: set[str] = set()
+        to_visit = [start_node]
+        visited = set()
+
+        while to_visit:
+            current_node = to_visit.pop(0)
+
+            if current_node.name in visited:
+                continue
+            visited.add(current_node.name)
+
+            # Add to our collection
+            nodes_in_path.add(current_node.name)
+
+            # Stop if we've reached the end node
+            if current_node == end_node:
+                continue
+
+            # Find all outgoing control connections
+            if current_node.name in connections.outgoing_index:
+                for param_name, connection_ids in connections.outgoing_index[current_node.name].items():
+                    param = current_node.get_parameter_by_name(param_name)
+                    if param and param.output_type == ParameterTypeBuiltin.CONTROL_TYPE.value:
+                        for connection_id in connection_ids:
+                            if connection_id in connections.connections:
+                                connection = connections.connections[connection_id]
+                                next_node = connection.target_node
+                                if next_node.name not in visited and not connection.is_node_group_internal:
+                                    to_visit.append(next_node)
+
+        return nodes_in_path
+
+    @staticmethod
+    def collect_data_dependencies_for_node(
+        node: BaseNode, connections: Connections, nodes_to_exclude: set[str], visited: set[str]
+    ) -> set[str]:
+        """Collect data dependencies for a node recursively.
+
+        Args:
+            node: The node to collect dependencies for
+            connections: The connections manager
+            nodes_to_exclude: Set of nodes to exclude (e.g., nodes already in control flow)
+            visited: Set of already visited dependency nodes (modified in place)
+
+        Returns:
+            Set of dependency node names
+        """
+        if node.name in visited:
+            return set()
+
+        visited.add(node.name)
+        dependencies: set[str] = set()
+
+        # Check for ignore_dependencies attribute (like output_selector)
+        ignore_data_dependencies = hasattr(node, "ignore_dependencies")
+        if ignore_data_dependencies:
+            return dependencies
+
+        # Process each parameter looking for data dependencies
+        for param in node.parameters:
+            # Skip control parameters
+            if param.type == ParameterTypeBuiltin.CONTROL_TYPE:
+                continue
+
+            # Get upstream data connection
+            upstream_connection = connections.get_connected_node(node, param, include_internal=False)
+            if upstream_connection:
+                upstream_node, _ = upstream_connection
+
+                # Skip if already resolved
+                if upstream_node.state == NodeResolutionState.RESOLVED:
+                    continue
+
+                # Only add if it's not in the exclusion set
+                if upstream_node.name not in nodes_to_exclude:
+                    dependencies.add(upstream_node.name)
+                    # Recursively collect dependencies of this dependency
+                    sub_deps = DagBuilder.collect_data_dependencies_for_node(
+                        upstream_node, connections, nodes_to_exclude, visited
+                    )
+                    dependencies.update(sub_deps)
+
+        return dependencies
+
     def _is_node_in_forward_path(
         self, start_node: BaseNode, target_node: BaseNode, connections: Connections, visited: set[str] | None = None
     ) -> bool:
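The new collect_nodes_in_forward_control_path above is a breadth-first walk over outgoing control connections that includes the end node but does not expand past it. A standalone equivalent over a plain adjacency dict (illustrative only, not the Connections API):

from collections import deque


def forward_control_path(control_edges: dict[str, list[str]], start: str, end: str) -> set[str]:
    """Return every node reachable from start via control edges, stopping at end (inclusive)."""
    nodes_in_path: set[str] = set()
    to_visit = deque([start])
    visited: set[str] = set()
    while to_visit:
        current = to_visit.popleft()
        if current in visited:
            continue
        visited.add(current)
        nodes_in_path.add(current)
        if current == end:
            continue  # include the end node but do not expand past it
        to_visit.extend(n for n in control_edges.get(current, []) if n not in visited)
    return nodes_in_path


edges = {"for_start": ["body_a"], "body_a": ["body_b"], "body_b": ["for_end"], "for_end": ["after"]}
assert forward_control_path(edges, "for_start", "for_end") == {"for_start", "body_a", "body_b", "for_end"}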
@@ -282,3 +341,22 @@ class DagBuilder:
         for node_name in self.graph_to_nodes[graph_name]:
             self.node_to_reference.pop(node_name, None)
         self.graph_to_nodes.pop(graph_name, None)
+
+    def remove_graph_from_dependencies(self) -> list[str]:
+        # Check all start node candidates and return those whose dependent graphs are all empty
+        start_nodes = []
+        # Copy because entries are removed while iterating.
+        for start_node_name, graph_deps in self.start_node_candidates.copy().items():
+            # Check if all graphs this start node depends on are now empty
+            all_deps_empty = True
+            for graph_deps_name in graph_deps:
+                # Check if this graph exists and still has nodes
+                if graph_deps_name in self.graphs and len(self.graphs[graph_deps_name].nodes()) > 0:
+                    all_deps_empty = False
+                    break
+
+            # If all dependent graphs are empty, this start node can be queued
+            if all_deps_empty:
+                del self.start_node_candidates[start_node_name]
+                start_nodes.append(start_node_name)
+        return start_nodes
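remove_graph_from_dependencies releases a deferred start-node candidate only once every control graph it depends on has drained. A standalone sketch of that rule, modeling graphs as a name-to-remaining-nodes mapping (toy code, not the griptape_nodes API):

def release_ready_candidates(
    graphs: dict[str, set[str]], start_node_candidates: dict[str, set[str]]
) -> list[str]:
    released: list[str] = []
    # Copy because entries are deleted while iterating.
    for candidate, graph_deps in start_node_candidates.copy().items():
        # A candidate is released only when every graph it depends on is empty or gone.
        if all(len(graphs.get(dep, set())) == 0 for dep in graph_deps):
            del start_node_candidates[candidate]
            released.append(candidate)
    return released


graphs = {"StartFlow": set()}  # the "StartFlow" control graph has fully drained
candidates = {"LoadImage": {"StartFlow"}}
assert release_ready_candidates(graphs, candidates) == ["LoadImage"]
assert candidates == {}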