griptape-nodes 0.55.1__py3-none-any.whl → 0.56.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. griptape_nodes/app/app.py +10 -15
  2. griptape_nodes/app/watch.py +35 -67
  3. griptape_nodes/bootstrap/utils/__init__.py +1 -0
  4. griptape_nodes/bootstrap/utils/python_subprocess_executor.py +122 -0
  5. griptape_nodes/bootstrap/workflow_executors/local_session_workflow_executor.py +418 -0
  6. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +37 -8
  7. griptape_nodes/bootstrap/workflow_executors/subprocess_workflow_executor.py +326 -0
  8. griptape_nodes/bootstrap/workflow_executors/utils/__init__.py +1 -0
  9. griptape_nodes/bootstrap/workflow_executors/utils/subprocess_script.py +51 -0
  10. griptape_nodes/bootstrap/workflow_publishers/__init__.py +1 -0
  11. griptape_nodes/bootstrap/workflow_publishers/local_workflow_publisher.py +43 -0
  12. griptape_nodes/bootstrap/workflow_publishers/subprocess_workflow_publisher.py +84 -0
  13. griptape_nodes/bootstrap/workflow_publishers/utils/__init__.py +1 -0
  14. griptape_nodes/bootstrap/workflow_publishers/utils/subprocess_script.py +54 -0
  15. griptape_nodes/cli/commands/engine.py +4 -15
  16. griptape_nodes/cli/commands/init.py +88 -0
  17. griptape_nodes/cli/commands/models.py +2 -0
  18. griptape_nodes/cli/main.py +6 -1
  19. griptape_nodes/cli/shared.py +1 -0
  20. griptape_nodes/exe_types/core_types.py +130 -0
  21. griptape_nodes/exe_types/node_types.py +125 -13
  22. griptape_nodes/machines/control_flow.py +10 -0
  23. griptape_nodes/machines/dag_builder.py +21 -2
  24. griptape_nodes/machines/parallel_resolution.py +25 -10
  25. griptape_nodes/node_library/workflow_registry.py +73 -3
  26. griptape_nodes/retained_mode/events/agent_events.py +2 -0
  27. griptape_nodes/retained_mode/events/base_events.py +18 -17
  28. griptape_nodes/retained_mode/events/execution_events.py +15 -3
  29. griptape_nodes/retained_mode/events/flow_events.py +63 -7
  30. griptape_nodes/retained_mode/events/mcp_events.py +363 -0
  31. griptape_nodes/retained_mode/events/node_events.py +3 -4
  32. griptape_nodes/retained_mode/events/resource_events.py +290 -0
  33. griptape_nodes/retained_mode/events/workflow_events.py +57 -2
  34. griptape_nodes/retained_mode/griptape_nodes.py +17 -1
  35. griptape_nodes/retained_mode/managers/agent_manager.py +67 -4
  36. griptape_nodes/retained_mode/managers/event_manager.py +31 -13
  37. griptape_nodes/retained_mode/managers/flow_manager.py +731 -33
  38. griptape_nodes/retained_mode/managers/library_manager.py +15 -23
  39. griptape_nodes/retained_mode/managers/mcp_manager.py +364 -0
  40. griptape_nodes/retained_mode/managers/model_manager.py +184 -83
  41. griptape_nodes/retained_mode/managers/node_manager.py +15 -4
  42. griptape_nodes/retained_mode/managers/os_manager.py +118 -1
  43. griptape_nodes/retained_mode/managers/resource_components/__init__.py +1 -0
  44. griptape_nodes/retained_mode/managers/resource_components/capability_field.py +41 -0
  45. griptape_nodes/retained_mode/managers/resource_components/comparator.py +18 -0
  46. griptape_nodes/retained_mode/managers/resource_components/resource_instance.py +236 -0
  47. griptape_nodes/retained_mode/managers/resource_components/resource_type.py +79 -0
  48. griptape_nodes/retained_mode/managers/resource_manager.py +306 -0
  49. griptape_nodes/retained_mode/managers/resource_types/__init__.py +1 -0
  50. griptape_nodes/retained_mode/managers/resource_types/cpu_resource.py +108 -0
  51. griptape_nodes/retained_mode/managers/resource_types/os_resource.py +87 -0
  52. griptape_nodes/retained_mode/managers/settings.py +45 -0
  53. griptape_nodes/retained_mode/managers/sync_manager.py +10 -3
  54. griptape_nodes/retained_mode/managers/workflow_manager.py +447 -263
  55. griptape_nodes/traits/multi_options.py +5 -1
  56. griptape_nodes/traits/options.py +10 -2
  57. {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.1.dist-info}/METADATA +2 -2
  58. {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.1.dist-info}/RECORD +60 -37
  59. {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.1.dist-info}/WHEEL +1 -1
  60. {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.1.dist-info}/entry_points.txt +0 -0
@@ -18,6 +18,7 @@ from griptape_nodes.retained_mode.events.base_events import (
18
18
  from griptape_nodes.retained_mode.events.execution_events import (
19
19
  CurrentControlNodeEvent,
20
20
  CurrentDataNodeEvent,
21
+ InvolvedNodesEvent,
21
22
  NodeResolvedEvent,
22
23
  ParameterValueUpdateEvent,
23
24
  )
@@ -184,19 +185,22 @@ class ExecuteDagState(State):
184
185
  context: ParallelResolutionContext, node: BaseNode, network_name: str, flow_manager: FlowManager
185
186
  ) -> bool:
186
187
  """Check if control flow processing should be skipped."""
188
+ # Get network once to avoid duplicate lookups
189
+ if context.dag_builder is None:
190
+ msg = "DAG builder is not initialized"
191
+ raise ValueError(msg)
192
+ network = context.dag_builder.graphs.get(network_name, None)
193
+ if network is None:
194
+ msg = f"Network {network_name} not found in DAG builder"
195
+ raise ValueError(msg)
187
196
  if flow_manager.global_single_node_resolution:
197
+ # Clean up nodes from emptied graphs in single node resolution mode
198
+ if len(network) == 0 and context.dag_builder is not None:
199
+ context.dag_builder.cleanup_empty_graph_nodes(network_name)
200
+ ExecuteDagState._emit_involved_nodes_update(context)
188
201
  return True
189
202
 
190
- if context.dag_builder is not None:
191
- network = context.dag_builder.graphs.get(network_name, None)
192
- if network is not None and len(network) > 0:
193
- return True
194
-
195
- if node.stop_flow:
196
- node.stop_flow = False
197
- return True
198
-
199
- return False
203
+ return bool(len(network) > 0 or node.stop_flow)
200
204
 
201
205
  @staticmethod
202
206
  def _process_next_control_node(
@@ -229,6 +233,17 @@ class ExecuteDagState(State):
229
233
  )
230
234
  ExecuteDagState._add_and_queue_nodes(context, next_node, network_name)
231
235
 
236
+ @staticmethod
237
+ def _emit_involved_nodes_update(context: ParallelResolutionContext) -> None:
238
+ """Emit update of involved nodes based on current DAG state."""
239
+ if context.dag_builder is not None:
240
+ involved_nodes = list(context.node_to_reference.keys())
241
+ GriptapeNodes.EventManager().put_event(
242
+ ExecutionGriptapeNodeEvent(
243
+ wrapped_event=ExecutionEvent(payload=InvolvedNodesEvent(involved_nodes=involved_nodes))
244
+ )
245
+ )
246
+
232
247
  @staticmethod
233
248
  def _add_and_queue_nodes(context: ParallelResolutionContext, next_node: BaseNode, network_name: str) -> None:
234
249
  """Add nodes to DAG and queue them if ready."""
@@ -1,19 +1,45 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import json
4
+ import logging
3
5
  from datetime import datetime # noqa: TC003 (can't put into type checking block as Pydantic model relies on it)
4
6
  from pathlib import Path
5
- from typing import ClassVar
7
+ from typing import Any, ClassVar
6
8
 
7
- from pydantic import BaseModel, Field
9
+ from pydantic import BaseModel, Field, field_serializer, field_validator
8
10
 
9
11
  from griptape_nodes.node_library.library_registry import (
10
12
  LibraryNameAndVersion, # noqa: TC001 (putting this into type checking causes it to not be defined)
11
13
  )
12
14
  from griptape_nodes.utils.metaclasses import SingletonMeta
13
15
 
16
+ logger = logging.getLogger("griptape_nodes")
17
+
18
+ # Type aliases for clarity
19
+ type NodeName = str
20
+ type ParameterName = str
21
+ type ParameterAttribute = str
22
+ type ParameterMinimalDict = dict[ParameterAttribute, Any]
23
+ type NodeParametersMapping = dict[NodeName, dict[ParameterName, ParameterMinimalDict]]
24
+
25
+
26
+ class WorkflowShape(BaseModel):
27
+ """This structure reflects the input and output shapes extracted from StartNodes and EndNodes inside of the workflow.
28
+
29
+ A workflow may have multiple StartNodes and multiple EndNodes, each contributing their parameters
30
+ to the overall workflow shape.
31
+
32
+ Structure is:
33
+ - inputs: {start_node_name: {param_name: param_minimal_dict}}
34
+ - outputs: {end_node_name: {param_name: param_minimal_dict}}
35
+ """
36
+
37
+ inputs: NodeParametersMapping = Field(default_factory=dict)
38
+ outputs: NodeParametersMapping = Field(default_factory=dict)
39
+
14
40
 
15
41
  class WorkflowMetadata(BaseModel):
16
- LATEST_SCHEMA_VERSION: ClassVar[str] = "0.7.0"
42
+ LATEST_SCHEMA_VERSION: ClassVar[str] = "0.8.0"
17
43
 
18
44
  name: str
19
45
  schema_version: str
@@ -27,6 +53,50 @@ class WorkflowMetadata(BaseModel):
27
53
  creation_date: datetime | None = Field(default=None)
28
54
  last_modified_date: datetime | None = Field(default=None)
29
55
  branched_from: str | None = Field(default=None)
56
+ workflow_shape: WorkflowShape | None = Field(default=None)
57
+
58
+ @field_serializer("workflow_shape")
59
+ def serialize_workflow_shape(self, workflow_shape: WorkflowShape | None) -> str | None:
60
+ """Serialize WorkflowShape as JSON string to avoid TOML serialization issues.
61
+
62
+ The WorkflowShape contains deeply nested dictionaries with None values that are
63
+ meaningful data (e.g., default_value: None). TOML's nested table format creates
64
+ unreadable output and tomlkit fails on None values in nested structures.
65
+ JSON preserves None as null and keeps the data compact and readable.
66
+ """
67
+ if workflow_shape is None:
68
+ return None
69
+ # Use json.dumps to preserve None values as null, which TOML can handle
70
+ return json.dumps(workflow_shape.model_dump(), separators=(",", ":"))
71
+
72
+ @field_validator("workflow_shape", mode="before")
73
+ @classmethod
74
+ def validate_workflow_shape(cls, value: Any) -> WorkflowShape | None:
75
+ """Deserialize WorkflowShape from JSON string during TOML loading.
76
+
77
+ When loading workflow metadata from TOML files, the workflow_shape field
78
+ is stored as a JSON string that needs to be converted back to a WorkflowShape
79
+ object. This validator handles the expected input formats:
80
+ - JSON strings (from TOML deserialization)
81
+ - WorkflowShape objects (from direct Python construction)
82
+ - None values (workflows without Start/End nodes)
83
+
84
+ If JSON deserialization fails, logs an error and returns None for graceful
85
+ degradation, consistent with other metadata parsing failures in this codebase.
86
+ """
87
+ if value is None:
88
+ return None
89
+ if isinstance(value, WorkflowShape):
90
+ return value
91
+ if isinstance(value, str):
92
+ try:
93
+ data = json.loads(value)
94
+ return WorkflowShape(**data)
95
+ except (json.JSONDecodeError, TypeError, ValueError) as e:
96
+ logger.error("Failed to deserialize workflow_shape from JSON: %s", e)
97
+ return None
98
+ # Unexpected type - let Pydantic's normal validation handle it
99
+ return value
30
100
 
31
101
 
32
102
  class WorkflowRegistry(metaclass=SingletonMeta):
@@ -29,12 +29,14 @@ class RunAgentRequest(RequestPayload):
29
29
  Args:
30
30
  input: Text input to send to the agent
31
31
  url_artifacts: List of URL artifacts to include with the request
32
+ additional_mcp_servers: List of additional MCP server names to include
32
33
 
33
34
  Results: RunAgentResultStarted -> RunAgentResultSuccess (with output) | RunAgentResultFailure (execution error)
34
35
  """
35
36
 
36
37
  input: str
37
38
  url_artifacts: list[RunAgentRequestArtifact]
39
+ additional_mcp_servers: list[str] = field(default_factory=list)
38
40
 
39
41
 
40
42
  @dataclass
@@ -3,7 +3,7 @@ from __future__ import annotations
3
3
  import json
4
4
  import logging
5
5
  from abc import ABC, abstractmethod
6
- from dataclasses import asdict, dataclass, field, is_dataclass
6
+ from dataclasses import asdict, dataclass, field, fields, is_dataclass
7
7
  from typing import TYPE_CHECKING, Any, ClassVar, TypeVar
8
8
 
9
9
  from griptape.artifacts import BaseArtifact
@@ -389,6 +389,21 @@ class EventResult[P: RequestPayload, R: ResultPayload](BaseEvent, ABC):
389
389
  bool: True if success, False if failure
390
390
  """
391
391
 
392
+ @classmethod
393
+ def _create_payload_instance(cls, payload_type: type, payload_data: dict[str, Any]) -> Any:
394
+ """Create a payload instance from data, handling dataclass init=False fields."""
395
+ if is_dataclass(payload_type):
396
+ # Filter out fields that have init=False to avoid TypeError
397
+ init_fields = {f.name for f in fields(payload_type) if f.init}
398
+ filtered_data = {k: v for k, v in payload_data.items() if k in init_fields}
399
+ return payload_type(**filtered_data)
400
+ if issubclass(payload_type, BaseModel):
401
+ return payload_type.model_validate(payload_data)
402
+ instance = payload_type()
403
+ for key, value in payload_data.items():
404
+ setattr(instance, key, value)
405
+ return instance
406
+
392
407
  @classmethod
393
408
  def from_dict( # pyright: ignore[reportIncompatibleMethodOverride]
394
409
  cls, data: builtins.dict[str, Any], req_payload_type: type[P], res_payload_type: type[R]
@@ -403,28 +418,14 @@ class EventResult[P: RequestPayload, R: ResultPayload](BaseEvent, ABC):
403
418
 
404
419
  # Process request payload
405
420
  if req_payload_type:
406
- if is_dataclass(req_payload_type):
407
- request_payload = req_payload_type(**request_data)
408
- elif issubclass(req_payload_type, BaseModel):
409
- request_payload = req_payload_type.model_validate(request_data)
410
- else:
411
- request_payload = req_payload_type()
412
- for key, value in request_data.items():
413
- setattr(request_payload, key, value)
421
+ request_payload = cls._create_payload_instance(req_payload_type, request_data)
414
422
  else:
415
423
  msg = f"Cannot create {cls.__name__} without a request payload type"
416
424
  raise ValueError(msg)
417
425
 
418
426
  # Process result payload
419
427
  if res_payload_type:
420
- if is_dataclass(res_payload_type):
421
- result_payload = res_payload_type(**result_data)
422
- elif issubclass(res_payload_type, BaseModel):
423
- result_payload = res_payload_type.model_validate(result_data)
424
- else:
425
- result_payload = res_payload_type()
426
- for key, value in result_data.items():
427
- setattr(result_payload, key, value)
428
+ result_payload = cls._create_payload_instance(res_payload_type, result_data)
428
429
  else:
429
430
  msg = f"Cannot create {cls.__name__} without a result payload type"
430
431
  raise ValueError(msg)
@@ -4,6 +4,7 @@ from typing import Any
4
4
  from griptape_nodes.retained_mode.events.base_events import (
5
5
  ExecutionPayload,
6
6
  RequestPayload,
7
+ ResultDetails,
7
8
  ResultPayloadFailure,
8
9
  ResultPayloadSuccess,
9
10
  WorkflowAlteredMixin,
@@ -225,12 +226,10 @@ class GetFlowStateResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
225
226
  Args:
226
227
  control_node: Name of the current control node (if any)
227
228
  resolving_node: Name of the node currently being resolved (if any)
228
- involved_nodes: Names of nodes that are queued to be executed or have been executed in the current run.
229
229
  """
230
230
 
231
231
  control_nodes: list[str] | None
232
232
  resolving_node: list[str] | None
233
- involved_nodes: list[str] | None
234
233
 
235
234
 
236
235
  @dataclass
@@ -309,7 +308,8 @@ class ControlFlowResolvedEvent(ExecutionPayload):
309
308
  @dataclass
310
309
  @PayloadRegistry.register
311
310
  class ControlFlowCancelledEvent(ExecutionPayload):
312
- pass
311
+ result_details: ResultDetails | str | None = None
312
+ exception: Exception | None = None
313
313
 
314
314
 
315
315
  @dataclass
@@ -348,6 +348,18 @@ class NodeFinishProcessEvent(ExecutionPayload):
348
348
  node_name: str
349
349
 
350
350
 
351
+ @dataclass
352
+ @PayloadRegistry.register
353
+ class InvolvedNodesEvent(ExecutionPayload):
354
+ """Event indicating which nodes are involved in the current execution.
355
+
356
+ For parallel resolution: Dynamic list based on DAG builder state
357
+ For control flow/sequential: All nodes when started, empty when complete
358
+ """
359
+
360
+ involved_nodes: list[str]
361
+
362
+
351
363
  @dataclass
352
364
  @PayloadRegistry.register
353
365
  class GriptapeEvent(ExecutionPayload):
@@ -1,7 +1,8 @@
1
1
  from dataclasses import dataclass
2
2
  from typing import Any
3
3
 
4
- from griptape_nodes.node_library.library_registry import LibraryNameAndVersion
4
+ from griptape_nodes.exe_types.node_types import NodeDependencies
5
+ from griptape_nodes.node_library.workflow_registry import WorkflowShape
5
6
  from griptape_nodes.retained_mode.events.base_events import (
6
7
  RequestPayload,
7
8
  ResultPayloadFailure,
@@ -185,8 +186,6 @@ class SerializedFlowCommands:
185
186
  Useful for save/load, copy/paste, etc.
186
187
 
187
188
  Attributes:
188
- node_libraries_used (set[LibraryNameAndVersion]): Set of libraries and versions used by the nodes,
189
- including those in child flows.
190
189
  flow_initialization_command (CreateFlowRequest | ImportWorkflowAsReferencedSubFlowRequest | None): Command to initialize the flow that contains all of this.
191
190
  Can be CreateFlowRequest for standalone flows, ImportWorkflowAsReferencedSubFlowRequest for referenced workflows,
192
191
  or None to deserialize into whatever Flow is in the Current Context.
@@ -199,8 +198,9 @@ class SerializedFlowCommands:
199
198
  set_parameter_value_commands (dict[SerializedNodeCommands.NodeUUID, list[SerializedNodeCommands.IndirectSetParameterValueCommand]]): List of commands
200
199
  to set parameter values, keyed by node UUID, during deserialization.
201
200
  sub_flows_commands (list["SerializedFlowCommands"]): List of sub-flow commands. Cascades into sub-flows within this serialization.
202
- referenced_workflows (set[str]): Set of workflow file paths that are referenced by this flow and its sub-flows.
203
- Used for validation before deserialization to ensure all referenced workflows are available.
201
+ node_dependencies (NodeDependencies): Aggregated dependencies from all nodes in this flow and its sub-flows.
202
+ Includes referenced workflows, static files, Python imports, and libraries. Used for workflow packaging,
203
+ dependency resolution, and deployment planning.
204
204
  """
205
205
 
206
206
  @dataclass
@@ -221,7 +221,6 @@ class SerializedFlowCommands:
221
221
  target_node_uuid: SerializedNodeCommands.NodeUUID
222
222
  target_parameter_name: str
223
223
 
224
- node_libraries_used: set[LibraryNameAndVersion]
225
224
  flow_initialization_command: CreateFlowRequest | ImportWorkflowAsReferencedSubFlowRequest | None
226
225
  serialized_node_commands: list[SerializedNodeCommands]
227
226
  serialized_connections: list[IndirectConnectionSerialization]
@@ -231,7 +230,7 @@ class SerializedFlowCommands:
231
230
  ]
232
231
  set_lock_commands_per_node: dict[SerializedNodeCommands.NodeUUID, SetLockNodeStateRequest]
233
232
  sub_flows_commands: list["SerializedFlowCommands"]
234
- referenced_workflows: set[str]
233
+ node_dependencies: NodeDependencies
235
234
 
236
235
 
237
236
  @dataclass
@@ -382,3 +381,60 @@ class SetFlowMetadataResultSuccess(WorkflowAlteredMixin, ResultPayloadSuccess):
382
381
  @PayloadRegistry.register
383
382
  class SetFlowMetadataResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
384
383
  """Flow metadata update failed. Common causes: flow not found, no current context, invalid metadata."""
384
+
385
+
386
+ @dataclass
387
+ @PayloadRegistry.register
388
+ class PackageNodeAsSerializedFlowRequest(RequestPayload):
389
+ """Package a single node as a complete flow with artificial start and end nodes.
390
+
391
+ Creates a serialized flow where:
392
+ - Start node has output parameters matching the packaged node's incoming connections
393
+ - End node has input parameters matching the packaged node's outgoing connections
394
+ - All connections are properly mapped through Start -> Node -> End
395
+
396
+ Use when: Creating reusable components, exporting nodes for templates,
397
+ building sub-workflows from existing nodes, creating packaged functionality.
398
+
399
+ Args:
400
+ node_name: Name of the node to package as a flow (None for current context node)
401
+ start_node_type: Node type name for the artificial start node (defaults to "StartFlow")
402
+ end_node_type: Node type name for the artificial end node (defaults to "EndFlow")
403
+ start_end_specific_library_name: Library name containing the start/end nodes (defaults to "Griptape Nodes Library")
404
+ entry_control_parameter_name: Name of the control parameter that the package node should be entered from. The generated start node will create a connection to this control parameter. NOTE: if no entry_control_parameter_name is specified, the package will be entered from the first available control input parameter.
405
+ output_parameter_prefix: Prefix for parameter names on the generated end node to avoid collisions (defaults to "packaged_node_")
406
+
407
+ Results: PackageNodeAsSerializedFlowResultSuccess (with serialized flow) | PackageNodeAsSerializedFlowResultFailure (node not found, packaging error)
408
+ """
409
+
410
+ # If None is passed, assumes we're packaging the node in the Current Context
411
+ node_name: str | None = None
412
+ start_node_type: str = "StartFlow"
413
+ end_node_type: str = "EndFlow"
414
+ start_end_specific_library_name: str = "Griptape Nodes Library"
415
+ entry_control_parameter_name: str | None = None
416
+ output_parameter_prefix: str = "packaged_node_"
417
+
418
+
419
+ @dataclass
420
+ @PayloadRegistry.register
421
+ class PackageNodeAsSerializedFlowResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
422
+ """Node successfully packaged as serialized flow.
423
+
424
+ Args:
425
+ serialized_flow_commands: The complete serialized flow with StartFlow, target node, and EndFlow
426
+ workflow_shape: The workflow shape defining inputs and outputs for external callers
427
+ """
428
+
429
+ serialized_flow_commands: SerializedFlowCommands
430
+ workflow_shape: WorkflowShape
431
+
432
+
433
+ @dataclass
434
+ @PayloadRegistry.register
435
+ class PackageNodeAsSerializedFlowResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
436
+ """Node packaging failed.
437
+
438
+ Common causes: node not found, no current context, serialization error,
439
+ connection analysis failed, node has no valid flow context.
440
+ """