griptape-nodes 0.58.0__py3-none-any.whl → 0.59.0__py3-none-any.whl

This diff compares publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (30)
  1. griptape_nodes/bootstrap/utils/python_subprocess_executor.py +2 -2
  2. griptape_nodes/bootstrap/workflow_executors/local_session_workflow_executor.py +0 -5
  3. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +9 -5
  4. griptape_nodes/bootstrap/workflow_executors/subprocess_workflow_executor.py +0 -1
  5. griptape_nodes/bootstrap/workflow_executors/workflow_executor.py +1 -3
  6. griptape_nodes/bootstrap/workflow_publishers/local_workflow_publisher.py +1 -1
  7. griptape_nodes/cli/commands/init.py +53 -7
  8. griptape_nodes/cli/shared.py +1 -0
  9. griptape_nodes/common/node_executor.py +216 -40
  10. griptape_nodes/exe_types/core_types.py +46 -0
  11. griptape_nodes/exe_types/node_types.py +272 -0
  12. griptape_nodes/machines/control_flow.py +222 -16
  13. griptape_nodes/machines/dag_builder.py +212 -1
  14. griptape_nodes/machines/parallel_resolution.py +237 -4
  15. griptape_nodes/node_library/workflow_registry.py +1 -1
  16. griptape_nodes/retained_mode/events/execution_events.py +5 -4
  17. griptape_nodes/retained_mode/events/flow_events.py +17 -67
  18. griptape_nodes/retained_mode/events/parameter_events.py +122 -1
  19. griptape_nodes/retained_mode/managers/event_manager.py +17 -13
  20. griptape_nodes/retained_mode/managers/flow_manager.py +316 -573
  21. griptape_nodes/retained_mode/managers/library_manager.py +32 -20
  22. griptape_nodes/retained_mode/managers/model_manager.py +19 -8
  23. griptape_nodes/retained_mode/managers/node_manager.py +463 -3
  24. griptape_nodes/retained_mode/managers/object_manager.py +2 -2
  25. griptape_nodes/retained_mode/managers/workflow_manager.py +37 -46
  26. griptape_nodes/retained_mode/retained_mode.py +297 -3
  27. {griptape_nodes-0.58.0.dist-info → griptape_nodes-0.59.0.dist-info}/METADATA +3 -2
  28. {griptape_nodes-0.58.0.dist-info → griptape_nodes-0.59.0.dist-info}/RECORD +30 -30
  29. {griptape_nodes-0.58.0.dist-info → griptape_nodes-0.59.0.dist-info}/WHEEL +1 -1
  30. {griptape_nodes-0.58.0.dist-info → griptape_nodes-0.59.0.dist-info}/entry_points.txt +0 -0
griptape_nodes/machines/parallel_resolution.py

@@ -7,7 +7,13 @@ from typing import TYPE_CHECKING

from griptape_nodes.exe_types.connections import Direction
from griptape_nodes.exe_types.core_types import Parameter, ParameterTypeBuiltin
- from griptape_nodes.exe_types.node_types import CONTROL_INPUT_PARAMETER, LOCAL_EXECUTION, BaseNode, NodeResolutionState
+ from griptape_nodes.exe_types.node_types import (
+     CONTROL_INPUT_PARAMETER,
+     LOCAL_EXECUTION,
+     BaseNode,
+     NodeGroupProxyNode,
+     NodeResolutionState,
+ )
from griptape_nodes.exe_types.type_validator import TypeValidator
from griptape_nodes.machines.dag_builder import NodeState
from griptape_nodes.machines.fsm import FSM, State
@@ -56,6 +62,7 @@ class ParallelResolutionContext:
    task_to_node: dict[asyncio.Task, DagNode]
    dag_builder: DagBuilder | None
    last_resolved_node: BaseNode | None  # Track the last node that was resolved
+     last_resolved_node: BaseNode | None  # Track the last node that was resolved

    def __init__(
        self, flow_name: str, max_nodes_in_parallel: int | None = None, dag_builder: DagBuilder | None = None
@@ -66,6 +73,7 @@ class ParallelResolutionContext:
        self.workflow_state = WorkflowState.NO_ERROR
        self.dag_builder = dag_builder
        self.last_resolved_node = None
+         self.last_resolved_node = None

        # Initialize execution fields
        max_nodes_in_parallel = max_nodes_in_parallel if max_nodes_in_parallel is not None else 5
@@ -101,6 +109,7 @@ class ParallelResolutionContext:
        self.error_message = None
        self.task_to_node.clear()
        self.last_resolved_node = None
+         self.last_resolved_node = None

        # Clear DAG builder state to allow re-adding nodes on subsequent runs
        if self.dag_builder:
@@ -121,10 +130,17 @@ class ExecuteDagState(State):
            )
            return

+         # Check if this is a NodeGroupProxyNode - if so, handle grouped nodes
+
+         if isinstance(current_node, NodeGroupProxyNode):
+             await ExecuteDagState._handle_group_proxy_completion(context, current_node, network_name)
+             return
        # Publish all parameter updates.
        current_node.state = NodeResolutionState.RESOLVED
        # Track this as the last resolved node
        context.last_resolved_node = current_node
+         # Track this as the last resolved node
+         context.last_resolved_node = current_node
        # Serialization can be slow so only do it if the user wants debug details.
        if logger.level <= logging.DEBUG:
            logger.debug(
@@ -177,6 +193,170 @@ class ExecuteDagState(State):
        ExecuteDagState.get_next_control_graph(context, current_node, network_name)

    # Method is mirrored in Control_flow.py. If you update one, update the other.
+     @staticmethod
+     async def _handle_group_proxy_completion(
+         context: ParallelResolutionContext, proxy_node: BaseNode, network_name: str
+     ) -> None:
+         """Handle completion of a NodeGroupProxyNode by marking all grouped nodes as resolved.
+
+         When a NodeGroupProxyNode completes, all nodes in the group have been executed
+         in parallel by the NodeExecutor. This method marks each grouped node as RESOLVED
+         and emits NodeResolvedEvent for each one.
+
+         Args:
+             proxy_node: The NodeGroupProxyNode that completed execution
+             context: The ParallelResolutionContext
+             network_name: The name of the network
+         """
+         from griptape_nodes.exe_types.node_types import NodeGroupProxyNode
+
+         if not isinstance(proxy_node, NodeGroupProxyNode):
+             return
+
+         node_group = proxy_node.node_group_data
+
+         # Mark all grouped nodes as resolved and emit events
+         proxy_node.state = NodeResolutionState.RESOLVED
+         for grouped_node in node_group.nodes.values():
+             # Mark node as resolved
+             grouped_node.state = NodeResolutionState.RESOLVED
+
+             # Emit parameter update events for each output parameter
+             for parameter_name, value in grouped_node.parameter_output_values.items():
+                 parameter = grouped_node.get_parameter_by_name(parameter_name)
+                 if parameter is None:
+                     logger.warning(
+                         "Node '%s' in group '%s' has output parameter '%s' but parameter not found",
+                         grouped_node.name,
+                         node_group.group_id,
+                         parameter_name,
+                     )
+                     continue
+
+                 data_type = parameter.type
+                 if data_type is None:
+                     data_type = ParameterTypeBuiltin.NONE.value
+
+                 await GriptapeNodes.EventManager().aput_event(
+                     ExecutionGriptapeNodeEvent(
+                         wrapped_event=ExecutionEvent(
+                             payload=ParameterValueUpdateEvent(
+                                 node_name=grouped_node.name,
+                                 parameter_name=parameter_name,
+                                 data_type=data_type,
+                                 value=TypeValidator.safe_serialize(value),
+                             )
+                         ),
+                     )
+                 )
+
+             # Emit NodeResolvedEvent for the grouped node
+             library = LibraryRegistry.get_libraries_with_node_type(grouped_node.__class__.__name__)
+             library_name = library[0] if len(library) == 1 else None
+
+             await GriptapeNodes.EventManager().aput_event(
+                 ExecutionGriptapeNodeEvent(
+                     wrapped_event=ExecutionEvent(
+                         payload=NodeResolvedEvent(
+                             node_name=grouped_node.name,
+                             parameter_output_values=TypeValidator.safe_serialize(grouped_node.parameter_output_values),
+                             node_type=grouped_node.__class__.__name__,
+                             specific_library_name=library_name,
+                         )
+                     )
+                 )
+             )
+
+         # Cleanup: restore connections and deregister proxy
+         ExecuteDagState.get_next_control_graph(context, proxy_node, network_name)
+         ExecuteDagState._cleanup_proxy_node(proxy_node)
+
+     @staticmethod
+     def _cleanup_proxy_node(proxy_node: BaseNode) -> None:
+         """Clean up a NodeGroupProxyNode after execution completes.
+
+         Restores original connections from proxy back to grouped nodes and
+         deregisters the proxy node from ObjectManager.
+
+         Args:
+             proxy_node: The NodeGroupProxyNode to clean up
+         """
+         from griptape_nodes.exe_types.node_types import NodeGroupProxyNode
+
+         if not isinstance(proxy_node, NodeGroupProxyNode):
+             return
+
+         node_group = proxy_node.node_group_data
+         connections = GriptapeNodes.FlowManager().get_connections()
+
+         # Restore external incoming connections (proxy -> original target node)
+         for conn in node_group.external_incoming_connections:
+             conn_id = id(conn)
+
+             # Get original target node
+             original_target = node_group.original_incoming_targets.get(conn_id)
+             if original_target is None:
+                 continue
+
+             # Create proxy parameter name to find it in the index
+             sanitized_node_name = original_target.name.replace(" ", "_")
+             proxy_param_name = f"{sanitized_node_name}__{conn.target_parameter.name}"
+
+             # Remove proxy from incoming index (using proxy parameter name)
+             if (
+                 proxy_node.name in connections.incoming_index
+                 and proxy_param_name in connections.incoming_index[proxy_node.name]
+             ):
+                 connections.incoming_index[proxy_node.name][proxy_param_name].remove(conn_id)
+
+             # Restore connection to original target node
+             conn.target_node = original_target
+
+             # Add back to original target node's incoming index (using original parameter name)
+             connections.incoming_index.setdefault(conn.target_node.name, {}).setdefault(
+                 conn.target_parameter.name, []
+             ).append(conn_id)
+
+         # Restore external outgoing connections (original source node -> proxy)
+         for conn in node_group.external_outgoing_connections:
+             conn_id = id(conn)
+
+             # Get original source node
+             original_source = node_group.original_outgoing_sources.get(conn_id)
+             if original_source is None:
+                 continue
+
+             # Create proxy parameter name to find it in the index
+             sanitized_node_name = original_source.name.replace(" ", "_")
+             proxy_param_name = f"{sanitized_node_name}__{conn.source_parameter.name}"
+
+             # Remove proxy from outgoing index (using proxy parameter name)
+             if (
+                 proxy_node.name in connections.outgoing_index
+                 and proxy_param_name in connections.outgoing_index[proxy_node.name]
+             ):
+                 connections.outgoing_index[proxy_node.name][proxy_param_name].remove(conn_id)
+
+             # Restore connection to original source node
+             conn.source_node = original_source
+
+             # Add back to original source node's outgoing index (using original parameter name)
+             connections.outgoing_index.setdefault(conn.source_node.name, {}).setdefault(
+                 conn.source_parameter.name, []
+             ).append(conn_id)
+
+         # Deregister proxy node from ObjectManager
+         obj_manager = GriptapeNodes.ObjectManager()
+         if obj_manager.has_object_with_name(proxy_node.name):
+             del obj_manager._name_to_objects[proxy_node.name]
+
+         logger.debug(
+             "Cleaned up proxy node '%s' for group '%s' - restored %d connections",
+             proxy_node.name,
+             node_group.group_id,
+             len(node_group.external_incoming_connections) + len(node_group.external_outgoing_connections),
+         )
+
    @staticmethod
    def get_next_control_output_for_non_local_execution(node: BaseNode) -> Parameter | None:
        for param_name, value in node.parameter_output_values.items():
@@ -205,6 +385,10 @@ class ExecuteDagState(State):
            next_output = ExecuteDagState.get_next_control_output_for_non_local_execution(node)
        else:
            next_output = node.get_next_control_output()
+         if node.get_parameter_value(node.execution_environment.name) != LOCAL_EXECUTION:
+             next_output = ExecuteDagState.get_next_control_output_for_non_local_execution(node)
+         else:
+             next_output = node.get_next_control_output()
        if next_output is not None:
            ExecuteDagState._process_next_control_node(context, node, next_output, network_name, flow_manager)

@@ -241,6 +425,14 @@ class ExecuteDagState(State):
        """Process the next control node in the flow."""
        node_connection = flow_manager.get_connections().get_connected_node(node, next_output)
        if node_connection is not None:
+             next_node, next_parameter = node_connection
+             # Set entry control parameter
+             logger.debug(
+                 "Parallel Resolution: Setting entry control parameter for node '%s' to '%s'",
+                 next_node.name,
+                 next_parameter.name if next_parameter else None,
+             )
+             next_node.set_entry_control_parameter(next_parameter)
            next_node, next_parameter = node_connection
            # Set entry control parameter
            logger.debug(
@@ -327,6 +519,7 @@ class ExecuteDagState(State):
        for parameter in current_node.parameters:
            # Get the connected upstream node for this parameter
            upstream_connection = connections.get_connected_node(current_node, parameter, direction=Direction.UPSTREAM)
+             upstream_connection = connections.get_connected_node(current_node, parameter, direction=Direction.UPSTREAM)
            if upstream_connection:
                upstream_node, upstream_parameter = upstream_connection

@@ -351,6 +544,20 @@ class ExecuteDagState(State):
                    msg = f"Failed to set value for parameter '{parameter.name}' on node '{current_node.name}': {result.result_details}"
                    logger.error(msg)
                    raise RuntimeError(msg)
+                 result = await GriptapeNodes.get_instance().ahandle_request(
+                     SetParameterValueRequest(
+                         parameter_name=parameter.name,
+                         node_name=current_node.name,
+                         value=output_value,
+                         data_type=upstream_parameter.output_type,
+                         incoming_connection_source_node_name=upstream_node.name,
+                         incoming_connection_source_parameter_name=upstream_parameter.name,
+                     )
+                 )
+                 if isinstance(result, SetParameterValueResultFailure):
+                     msg = f"Failed to set value for parameter '{parameter.name}' on node '{current_node.name}': {result.result_details}"
+                     logger.error(msg)
+                     raise RuntimeError(msg)

    @staticmethod
    def build_node_states(context: ParallelResolutionContext) -> tuple[set[str], set[str], set[str]]:
@@ -411,8 +618,6 @@ class ExecuteDagState(State):
    @staticmethod
    async def execute_node(current_node: DagNode, semaphore: asyncio.Semaphore) -> None:
        async with semaphore:
-             from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
-
            executor = GriptapeNodes.FlowManager().node_executor
            await executor.execute(current_node.node_reference)

@@ -431,7 +636,7 @@ class ExecuteDagState(State):
        return None

    @staticmethod
-     async def on_update(context: ParallelResolutionContext) -> type[State] | None:  # noqa: C901, PLR0911
+     async def on_update(context: ParallelResolutionContext) -> type[State] | None:  # noqa: C901, PLR0911, PLR0915
        # Check if execution is paused
        if context.paused:
            return None
@@ -523,10 +728,25 @@ class ExecuteDagState(State):
                    exc,
                )

+                 dag_node = context.task_to_node.get(task)
+                 node_name = dag_node.node_reference.name if dag_node else "Unknown"
+                 node_type = dag_node.node_reference.__class__.__name__ if dag_node else "Unknown"
+
+                 logger.exception(
+                     "Task execution failed for node '%s' (type: %s) in flow '%s'. Exception: %s",
+                     node_name,
+                     node_type,
+                     context.flow_name,
+                     exc,
+                 )
+
                context.task_to_node.pop(task)
                context.error_message = f"Task execution failed for node '{node_name}': {exc}"
                context.workflow_state = WorkflowState.ERRORED
                return ErrorState
+                 context.error_message = f"Task execution failed for node '{node_name}': {exc}"
+                 context.workflow_state = WorkflowState.ERRORED
+                 return ErrorState
            context.task_to_node.pop(task)
            # Once a task has finished, loop back to the top.
            await ExecuteDagState.pop_done_states(context)
@@ -541,10 +761,23 @@ class ErrorState(State):
    async def on_enter(context: ParallelResolutionContext) -> type[State] | None:
        if context.error_message:
            logger.error("DAG execution error: %s", context.error_message)
+
+         # Clean up any proxy nodes that failed before completion
+         from griptape_nodes.exe_types.node_types import NodeGroupProxyNode
+
        for node in context.node_to_reference.values():
+             # Clean up proxy nodes that were processing or queued
+             if isinstance(node.node_reference, NodeGroupProxyNode) and node.node_state in (
+                 NodeState.PROCESSING,
+                 NodeState.QUEUED,
+             ):
+                 logger.info("Cleaning up proxy node '%s' that failed during execution", node.node_reference.name)
+                 ExecuteDagState._cleanup_proxy_node(node.node_reference)
+
            # Cancel all nodes that haven't yet begun processing.
            if node.node_state == NodeState.QUEUED:
                node.node_state = NodeState.CANCELED
+
        # Shut down and cancel all threads/tasks that haven't yet ran. Currently running ones will not be affected.
        # Cancel async tasks
        for task in list(context.task_to_node.keys()):
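The `_cleanup_proxy_node` hunk above rewrites the flow's connection indexes, which map node name → parameter name → connection ids, moving each external connection from the proxy's mangled parameter name (`<node name with spaces replaced by underscores>__<parameter name>`) back to the original node and parameter. A minimal standalone sketch of that bookkeeping, using plain dicts and hypothetical node/parameter names in place of the real `Connections` object:

```python
# Hypothetical stand-in for Connections.incoming_index: node name -> parameter name -> [connection ids]
incoming_index: dict[str, dict[str, list[int]]] = {
    "GroupProxy_1": {"Image_Loader__image": [101]},  # proxy currently owns the connection
}

conn_id = 101                       # stands in for id(conn) in the real code
proxy_name = "GroupProxy_1"         # hypothetical proxy node name
original_target = "Image Loader"    # hypothetical original node name (may contain spaces)
original_param = "image"

# Proxy parameter names are built as "<sanitized node name>__<parameter name>"
sanitized = original_target.replace(" ", "_")
proxy_param = f"{sanitized}__{original_param}"

# Remove the entry that points at the proxy...
if proxy_name in incoming_index and proxy_param in incoming_index[proxy_name]:
    incoming_index[proxy_name][proxy_param].remove(conn_id)

# ...and re-register it under the original node and original parameter name.
incoming_index.setdefault(original_target, {}).setdefault(original_param, []).append(conn_id)

print(incoming_index)
# {'GroupProxy_1': {'Image_Loader__image': []}, 'Image Loader': {'image': [101]}}
```

The same remove-then-`setdefault` pattern is applied symmetrically to the outgoing index before the proxy node is deregistered.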
griptape_nodes/node_library/workflow_registry.py

@@ -45,7 +45,7 @@ class WorkflowShape(BaseModel):


class WorkflowMetadata(BaseModel):
-     LATEST_SCHEMA_VERSION: ClassVar[str] = "0.9.0"
+     LATEST_SCHEMA_VERSION: ClassVar[str] = "0.10.0"

    name: str
    schema_version: str
griptape_nodes/retained_mode/events/execution_events.py

@@ -227,12 +227,13 @@ class GetFlowStateResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
    """Flow execution state retrieved successfully.

    Args:
-         control_node: Name of the current control node (if any)
-         resolving_node: Name of the node currently being resolved (if any)
+         control_nodes: Name of the current control node (if any)
+         resolving_nodes: Name of the node currently being resolved (if any)
    """

-     control_nodes: list[str] | None
-     resolving_node: list[str] | None
+     control_nodes: list[str]
+     resolving_nodes: list[str]
+     involved_nodes: list[str]


@dataclass
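`GetFlowStateResultSuccess` now carries three required lists instead of two optional fields. A minimal sketch of consuming the new shape, assuming the result object was obtained from a flow-state request elsewhere (only the fields shown in the hunk are used):

```python
from griptape_nodes.retained_mode.events.execution_events import GetFlowStateResultSuccess


def summarize_flow_state(result: GetFlowStateResultSuccess) -> str:
    """Build a one-line summary from the 0.59.0 result shape (all three fields are plain lists)."""
    return (
        f"control={', '.join(result.control_nodes) or 'none'}; "
        f"resolving={', '.join(result.resolving_nodes) or 'none'}; "
        f"involved={len(result.involved_nodes)} node(s)"
    )
```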
griptape_nodes/retained_mode/events/flow_events.py

@@ -1,8 +1,14 @@
+ from __future__ import annotations
+
from dataclasses import dataclass, field
- from typing import Any, NamedTuple
+ from typing import TYPE_CHECKING, Any, NamedTuple
+
+ if TYPE_CHECKING:
+     from griptape_nodes.exe_types.node_types import NodeDependencies, NodeGroupProxyNode
+     from griptape_nodes.node_library.workflow_registry import LibraryNameAndNodeType, WorkflowShape
+     from griptape_nodes.retained_mode.events.node_events import SerializedNodeCommands, SetLockNodeStateRequest
+     from griptape_nodes.retained_mode.events.workflow_events import ImportWorkflowAsReferencedSubFlowRequest

- from griptape_nodes.exe_types.node_types import NodeDependencies
- from griptape_nodes.node_library.workflow_registry import LibraryNameAndNodeType, WorkflowShape
from griptape_nodes.retained_mode.events.base_events import (
    RequestPayload,
    ResultPayloadFailure,
@@ -10,9 +16,7 @@ from griptape_nodes.retained_mode.events.base_events import (
    WorkflowAlteredMixin,
    WorkflowNotAlteredMixin,
)
- from griptape_nodes.retained_mode.events.node_events import SerializedNodeCommands, SetLockNodeStateRequest
from griptape_nodes.retained_mode.events.payload_registry import PayloadRegistry
- from griptape_nodes.retained_mode.events.workflow_events import ImportWorkflowAsReferencedSubFlowRequest


@dataclass(kw_only=True)
@@ -231,7 +235,7 @@ class SerializedFlowCommands:
        SerializedNodeCommands.NodeUUID, list[SerializedNodeCommands.IndirectSetParameterValueCommand]
    ]
    set_lock_commands_per_node: dict[SerializedNodeCommands.NodeUUID, SetLockNodeStateRequest]
-     sub_flows_commands: list["SerializedFlowCommands"]
+     sub_flows_commands: list[SerializedFlowCommands]
    node_dependencies: NodeDependencies
    node_types_used: set[LibraryNameAndNodeType]

@@ -386,63 +390,6 @@ class SetFlowMetadataResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
    """Flow metadata update failed. Common causes: flow not found, no current context, invalid metadata."""


- @dataclass
- @PayloadRegistry.register
- class PackageNodeAsSerializedFlowRequest(RequestPayload):
-     """Package a single node as a complete flow with artificial start and end nodes.
-
-     Creates a serialized flow where:
-     - Start node has output parameters matching the packaged node's incoming connections
-     - End node has input parameters matching the packaged node's outgoing connections
-     - All connections are properly mapped through Start -> Node -> End
-
-     Use when: Creating reusable components, exporting nodes for templates,
-     building sub-workflows from existing nodes, creating packaged functionality.
-
-     Args:
-         node_name: Name of the node to package as a flow (None for current context node)
-         start_node_type: Node type name for the artificial start node (defaults to "StartFlow")
-         end_node_type: Node type name for the artificial end node (defaults to "EndFlow")
-         start_end_specific_library_name: Library name containing the start/end nodes (defaults to "Griptape Nodes Library")
-         entry_control_parameter_name: Name of the control parameter that the package node should be entered from. The generated start node will create a connection to this control parameter. NOTE: if no entry_control_parameter_name is specified, the package will be entered from the first available control input parameter.
-         output_parameter_prefix: Prefix for parameter names on the generated end node to avoid collisions (defaults to "packaged_node_")
-
-     Results: PackageNodeAsSerializedFlowResultSuccess (with serialized flow) | PackageNodeAsSerializedFlowResultFailure (node not found, packaging error)
-     """
-
-     # If None is passed, assumes we're packaging the node in the Current Context
-     node_name: str | None = None
-     start_node_type: str = "StartFlow"
-     end_node_type: str = "EndFlow"
-     start_end_specific_library_name: str = "Griptape Nodes Library"
-     entry_control_parameter_name: str | None = None
-     output_parameter_prefix: str = "packaged_node_"
-
-
- @dataclass
- @PayloadRegistry.register
- class PackageNodeAsSerializedFlowResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
-     """Node successfully packaged as serialized flow.
-
-     Args:
-         serialized_flow_commands: The complete serialized flow with StartFlow, target node, and EndFlow
-         workflow_shape: The workflow shape defining inputs and outputs for external callers
-     """
-
-     serialized_flow_commands: SerializedFlowCommands
-     workflow_shape: WorkflowShape
-
-
- @dataclass
- @PayloadRegistry.register
- class PackageNodeAsSerializedFlowResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
-     """Node packaging failed.
-
-     Common causes: node not found, no current context, serialization error,
-     connection analysis failed, node has no valid flow context.
-     """
-
-
# Type aliases for parameter mapping clarity
SanitizedParameterName = str  # What appears in the serialized flow
OriginalNodeName = str  # Original node name (can have spaces, dots, etc.)
@@ -479,8 +426,8 @@ class PackageNodesAsSerializedFlowRequest(RequestPayload):

    Args:
        node_names: List of node names to package as a flow (empty list will create StartFlow→EndFlow only with warning)
-         start_node_type: Node type name for the artificial start node (defaults to "StartFlow")
-         end_node_type: Node type name for the artificial end node (defaults to "EndFlow")
+         start_node_type: Node type name for the artificial start node (None or omitted defaults to "StartFlow")
+         end_node_type: Node type name for the artificial end node (None or omitted defaults to "EndFlow")
        start_end_specific_library_name: Library name containing the start/end nodes (defaults to "Griptape Nodes Library")
        entry_control_node_name: Name of the node that should receive the control flow entry (required if entry_control_parameter_name specified)
        entry_control_parameter_name: Name of the control parameter on the entry node (None for auto-detection of first available control parameter)
@@ -491,12 +438,15 @@

    # List of node names to package (empty list creates StartFlow→EndFlow only with warning)
    node_names: list[str] = field(default_factory=list)
-     start_node_type: str = "StartFlow"
-     end_node_type: str = "EndFlow"
+     start_node_type: str | None = None
+     end_node_type: str | None = None
    start_end_specific_library_name: str = "Griptape Nodes Library"
    entry_control_node_name: str | None = None
    entry_control_parameter_name: str | None = None
    output_parameter_prefix: str = "packaged_node_"
+     proxy_node: NodeGroupProxyNode | None = (
+         None  # NodeGroupProxyNode if packaging nodes from a proxy, used to access original connections
+     )


@dataclass
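With the single-node `PackageNodeAsSerializedFlowRequest` family removed, packaging goes through `PackageNodesAsSerializedFlowRequest`, whose start/end node types now default to `None` and fall back to "StartFlow"/"EndFlow". A hedged construction sketch using hypothetical node names; the payload would then be submitted through the engine's request handler (e.g. the `ahandle_request` call visible in the parallel_resolution.py hunks above):

```python
from griptape_nodes.retained_mode.events.flow_events import PackageNodesAsSerializedFlowRequest

# Hypothetical node names. start_node_type/end_node_type are left unset (None),
# which the handler treats as "StartFlow"/"EndFlow" per the docstring above.
request = PackageNodesAsSerializedFlowRequest(
    node_names=["Prompt Builder", "Image Generator"],
    entry_control_node_name="Prompt Builder",
    entry_control_parameter_name=None,  # auto-detect the first available control parameter
)
```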
griptape_nodes/retained_mode/events/parameter_events.py

@@ -1,7 +1,7 @@
from __future__ import annotations

from dataclasses import dataclass
- from typing import Any, NamedTuple
+ from typing import TYPE_CHECKING, Any, NamedTuple

from pydantic import Field

@@ -16,6 +16,11 @@ from griptape_nodes.retained_mode.events.base_events import (
)
from griptape_nodes.retained_mode.events.payload_registry import PayloadRegistry

+ if TYPE_CHECKING:
+     from collections.abc import Callable
+
+     from griptape_nodes.retained_mode.events.connection_events import IncomingConnection, OutgoingConnection
+


@dataclass
@@ -542,7 +547,123 @@ class RenameParameterResultFailure(ResultPayloadFailure):
    """


+ @dataclass
+ @PayloadRegistry.register
+ class GetConnectionsForParameterRequest(RequestPayload):
+     """Get connections for a specific parameter on a node.
+
+     Use when: Checking if a parameter is connected, getting connection details for a parameter,
+     validating parameter connection state, building connection-aware UIs.
+
+     Args:
+         parameter_name: Name of the parameter to get connections for
+         node_name: Name of the node containing the parameter (None for current context)
+
+     Results: GetConnectionsForParameterResultSuccess (with connection details) | GetConnectionsForParameterResultFailure
+     """
+
+     parameter_name: str
+     # If node name is None, use the Current Context
+     node_name: str | None = None
+
+
+ @dataclass
+ @PayloadRegistry.register
+ class GetConnectionsForParameterResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
+     """Parameter connections retrieved successfully.
+
+     Args:
+         parameter_name: Name of the parameter
+         node_name: Name of the node containing the parameter
+         incoming_connections: List of incoming connections to this parameter
+         outgoing_connections: List of outgoing connections from this parameter
+     """
+
+     parameter_name: str
+     node_name: str
+     incoming_connections: list[IncomingConnection]
+     outgoing_connections: list[OutgoingConnection]
+
+     def has_incoming_connections(self) -> bool:
+         """Check if the parameter has any incoming connections."""
+         return len(self.incoming_connections) > 0
+
+     def has_outgoing_connections(self) -> bool:
+         """Check if the parameter has any outgoing connections."""
+         return len(self.outgoing_connections) > 0
+
+
+ @dataclass
+ @PayloadRegistry.register
+ class GetConnectionsForParameterResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
+     """Parameter connections retrieval failed. Common causes: node not found, parameter not found."""
+
+
@dataclass
@PayloadRegistry.register
class RemoveElementEvent(ExecutionPayload):
    element_id: str
+
+
+ # Migration Events
+ @dataclass
+ class ConversionConfig:
+     """Configuration for parameter conversion using intermediate nodes.
+
+     Args:
+         library: Library containing the conversion node type
+         node_type: Type of node to create for conversion
+         input_parameter: Parameter name on the conversion node to connect input to
+         output_parameter: Parameter name on the conversion node to connect output from
+         additional_parameters: Additional parameters to set on the conversion node
+         offset_side: Reference side/position from target node (defaults to "left" for input, "right" for output)
+         offset_x: X offset for positioning the conversion node relative to target node
+         offset_y: Y offset for positioning the conversion node relative to target node
+     """
+
+     library: str
+     node_type: str
+     input_parameter: str
+     output_parameter: str
+     additional_parameters: dict[str, Any] | None = None
+     offset_side: str | None = None
+     offset_x: int = 0
+     offset_y: int = 0
+
+
+ @dataclass
+ @PayloadRegistry.register
+ class MigrateParameterRequest(RequestPayload):
+     """Request to migrate a parameter from one node to another with optional conversions.
+
+     Args:
+         source_node_name: Name of the source node
+         target_node_name: Name of the target node
+         source_parameter_name: Name of the parameter to migrate from
+         target_parameter_name: Name of the parameter to migrate to
+         input_conversion: Configuration for converting incoming connections
+         output_conversion: Configuration for converting outgoing connections
+         value_transform: Function to transform values when no connections exist
+         break_connections: If True, break any existing connections for the original parameter
+     """
+
+     source_node_name: str
+     target_node_name: str
+     source_parameter_name: str
+     target_parameter_name: str
+     input_conversion: ConversionConfig | None = None
+     output_conversion: ConversionConfig | None = None
+     value_transform: Callable | None = None
+     break_connections: bool = True
+
+
+ @dataclass
+ @PayloadRegistry.register
+ class MigrateParameterResultSuccess(WorkflowAlteredMixin, ResultPayloadSuccess):
+     """Parameter migration completed successfully."""
+
+
+ @dataclass
+ @PayloadRegistry.register
+ class MigrateParameterResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
+     """Parameter migration failed."""