griptape-nodes 0.57.0__py3-none-any.whl → 0.58.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- griptape_nodes/api_client/__init__.py +9 -0
- griptape_nodes/api_client/client.py +279 -0
- griptape_nodes/api_client/request_client.py +273 -0
- griptape_nodes/app/app.py +57 -150
- griptape_nodes/bootstrap/utils/python_subprocess_executor.py +1 -1
- griptape_nodes/bootstrap/workflow_executors/local_session_workflow_executor.py +22 -50
- griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +6 -1
- griptape_nodes/bootstrap/workflow_executors/subprocess_workflow_executor.py +27 -46
- griptape_nodes/bootstrap/workflow_executors/utils/subprocess_script.py +7 -0
- griptape_nodes/bootstrap/workflow_publishers/local_workflow_publisher.py +3 -1
- griptape_nodes/bootstrap/workflow_publishers/subprocess_workflow_publisher.py +3 -1
- griptape_nodes/bootstrap/workflow_publishers/utils/subprocess_script.py +16 -1
- griptape_nodes/common/node_executor.py +466 -0
- griptape_nodes/drivers/storage/base_storage_driver.py +0 -11
- griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py +7 -25
- griptape_nodes/drivers/storage/local_storage_driver.py +2 -2
- griptape_nodes/exe_types/connections.py +37 -9
- griptape_nodes/exe_types/core_types.py +1 -1
- griptape_nodes/exe_types/node_types.py +115 -22
- griptape_nodes/machines/control_flow.py +48 -7
- griptape_nodes/machines/parallel_resolution.py +98 -29
- griptape_nodes/machines/sequential_resolution.py +61 -22
- griptape_nodes/node_library/library_registry.py +24 -1
- griptape_nodes/node_library/workflow_registry.py +38 -2
- griptape_nodes/retained_mode/events/execution_events.py +8 -1
- griptape_nodes/retained_mode/events/flow_events.py +90 -3
- griptape_nodes/retained_mode/events/node_events.py +17 -10
- griptape_nodes/retained_mode/events/workflow_events.py +5 -0
- griptape_nodes/retained_mode/griptape_nodes.py +16 -219
- griptape_nodes/retained_mode/managers/config_manager.py +0 -46
- griptape_nodes/retained_mode/managers/engine_identity_manager.py +225 -74
- griptape_nodes/retained_mode/managers/flow_manager.py +1276 -230
- griptape_nodes/retained_mode/managers/library_manager.py +7 -8
- griptape_nodes/retained_mode/managers/node_manager.py +197 -9
- griptape_nodes/retained_mode/managers/secrets_manager.py +26 -0
- griptape_nodes/retained_mode/managers/session_manager.py +264 -227
- griptape_nodes/retained_mode/managers/settings.py +4 -38
- griptape_nodes/retained_mode/managers/static_files_manager.py +3 -3
- griptape_nodes/retained_mode/managers/version_compatibility_manager.py +135 -6
- griptape_nodes/retained_mode/managers/workflow_manager.py +206 -78
- griptape_nodes/servers/mcp.py +23 -15
- griptape_nodes/utils/async_utils.py +36 -0
- griptape_nodes/utils/dict_utils.py +8 -2
- griptape_nodes/version_compatibility/versions/v0_39_0/modified_parameters_set_removal.py +11 -6
- griptape_nodes/version_compatibility/workflow_versions/v0_7_0/local_executor_argument_addition.py +12 -5
- {griptape_nodes-0.57.0.dist-info → griptape_nodes-0.58.0.dist-info}/METADATA +4 -3
- {griptape_nodes-0.57.0.dist-info → griptape_nodes-0.58.0.dist-info}/RECORD +49 -47
- {griptape_nodes-0.57.0.dist-info → griptape_nodes-0.58.0.dist-info}/WHEEL +1 -1
- griptape_nodes/retained_mode/utils/engine_identity.py +0 -245
- griptape_nodes/servers/ws_request_manager.py +0 -268
- {griptape_nodes-0.57.0.dist-info → griptape_nodes-0.58.0.dist-info}/entry_points.txt +0 -0
griptape_nodes/machines/parallel_resolution.py:

@@ -5,8 +5,9 @@ import logging
 from enum import StrEnum
 from typing import TYPE_CHECKING
 
-from griptape_nodes.exe_types.
-from griptape_nodes.exe_types.
+from griptape_nodes.exe_types.connections import Direction
+from griptape_nodes.exe_types.core_types import Parameter, ParameterTypeBuiltin
+from griptape_nodes.exe_types.node_types import CONTROL_INPUT_PARAMETER, LOCAL_EXECUTION, BaseNode, NodeResolutionState
 from griptape_nodes.exe_types.type_validator import TypeValidator
 from griptape_nodes.machines.dag_builder import NodeState
 from griptape_nodes.machines.fsm import FSM, State
@@ -22,7 +23,10 @@ from griptape_nodes.retained_mode.events.execution_events import (
     NodeResolvedEvent,
     ParameterValueUpdateEvent,
 )
-from griptape_nodes.retained_mode.events.parameter_events import
+from griptape_nodes.retained_mode.events.parameter_events import (
+    SetParameterValueRequest,
+    SetParameterValueResultFailure,
+)
 from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
 
 if TYPE_CHECKING:
@@ -51,6 +55,7 @@ class ParallelResolutionContext:
     async_semaphore: asyncio.Semaphore
     task_to_node: dict[asyncio.Task, DagNode]
    dag_builder: DagBuilder | None
+    last_resolved_node: BaseNode | None  # Track the last node that was resolved
 
     def __init__(
         self, flow_name: str, max_nodes_in_parallel: int | None = None, dag_builder: DagBuilder | None = None
@@ -60,6 +65,7 @@ class ParallelResolutionContext:
         self.error_message = None
         self.workflow_state = WorkflowState.NO_ERROR
         self.dag_builder = dag_builder
+        self.last_resolved_node = None
 
         # Initialize execution fields
         max_nodes_in_parallel = max_nodes_in_parallel if max_nodes_in_parallel is not None else 5
@@ -94,6 +100,7 @@ class ParallelResolutionContext:
         self.workflow_state = WorkflowState.NO_ERROR
         self.error_message = None
         self.task_to_node.clear()
+        self.last_resolved_node = None
 
         # Clear DAG builder state to allow re-adding nodes on subsequent runs
         if self.dag_builder:
@@ -116,6 +123,8 @@ class ExecuteDagState(State):
 
         # Publish all parameter updates.
         current_node.state = NodeResolutionState.RESOLVED
+        # Track this as the last resolved node
+        context.last_resolved_node = current_node
         # Serialization can be slow so only do it if the user wants debug details.
         if logger.level <= logging.DEBUG:
             logger.debug(
@@ -167,6 +176,23 @@ class ExecuteDagState(State):
         # Now the final thing to do, is to take their directed graph and update it.
         ExecuteDagState.get_next_control_graph(context, current_node, network_name)
 
+    # Method is mirrored in Control_flow.py. If you update one, update the other.
+    @staticmethod
+    def get_next_control_output_for_non_local_execution(node: BaseNode) -> Parameter | None:
+        for param_name, value in node.parameter_output_values.items():
+            parameter = node.get_parameter_by_name(param_name)
+            if (
+                parameter is not None
+                and parameter.type == ParameterTypeBuiltin.CONTROL_TYPE.value
+                and value == CONTROL_INPUT_PARAMETER
+            ):
+                # This is the parameter
+                logger.debug(
+                    "Parallel Resolution: Found control output parameter '%s' for non-local execution", param_name
+                )
+                return parameter
+        return None
+
     @staticmethod
     def get_next_control_graph(context: ParallelResolutionContext, node: BaseNode, network_name: str) -> None:
         """Get next control flow nodes and add them to the DAG graph."""
@@ -175,8 +201,10 @@ class ExecuteDagState(State):
         # Early returns for various conditions
         if ExecuteDagState._should_skip_control_flow(context, node, network_name, flow_manager):
             return
-
-
+        if node.get_parameter_value(node.execution_environment.name) != LOCAL_EXECUTION:
+            next_output = ExecuteDagState.get_next_control_output_for_non_local_execution(node)
+        else:
+            next_output = node.get_next_control_output()
         if next_output is not None:
             ExecuteDagState._process_next_control_node(context, node, next_output, network_name, flow_manager)
 
@@ -213,8 +241,14 @@ class ExecuteDagState(State):
         """Process the next control node in the flow."""
         node_connection = flow_manager.get_connections().get_connected_node(node, next_output)
         if node_connection is not None:
-            next_node,
-
+            next_node, next_parameter = node_connection
+            # Set entry control parameter
+            logger.debug(
+                "Parallel Resolution: Setting entry control parameter for node '%s' to '%s'",
+                next_node.name,
+                next_parameter.name if next_parameter else None,
+            )
+            next_node.set_entry_control_parameter(next_parameter)
             # Prepare next node for execution
             if not next_node.lock:
                 next_node.make_node_unresolved(
@@ -291,12 +325,8 @@ class ExecuteDagState(State):
         connections = GriptapeNodes.FlowManager().get_connections()
 
         for parameter in current_node.parameters:
-            # Skip control type parameters
-            if ParameterTypeBuiltin.CONTROL_TYPE.value.lower() == parameter.output_type:
-                continue
-
             # Get the connected upstream node for this parameter
-            upstream_connection = connections.get_connected_node(current_node, parameter)
+            upstream_connection = connections.get_connected_node(current_node, parameter, direction=Direction.UPSTREAM)
             if upstream_connection:
                 upstream_node, upstream_parameter = upstream_connection
 
@@ -307,21 +337,20 @@ class ExecuteDagState(State):
                 output_value = upstream_node.get_parameter_value(upstream_parameter.name)
 
                 # Pass the value through using the same mechanism as normal resolution
-
-
-
-
-
-
-
-
-                    node_name=current_node.name,
-                    value=output_value,
-                    data_type=upstream_parameter.output_type,
-                    incoming_connection_source_node_name=upstream_node.name,
-                    incoming_connection_source_parameter_name=upstream_parameter.name,
-                )
+                result = await GriptapeNodes.get_instance().ahandle_request(
+                    SetParameterValueRequest(
+                        parameter_name=parameter.name,
+                        node_name=current_node.name,
+                        value=output_value,
+                        data_type=upstream_parameter.output_type,
+                        incoming_connection_source_node_name=upstream_node.name,
+                        incoming_connection_source_parameter_name=upstream_parameter.name,
                     )
+                )
+                if isinstance(result, SetParameterValueResultFailure):
+                    msg = f"Failed to set value for parameter '{parameter.name}' on node '{current_node.name}': {result.result_details}"
+                    logger.error(msg)
+                    raise RuntimeError(msg)
 
     @staticmethod
     def build_node_states(context: ParallelResolutionContext) -> tuple[set[str], set[str], set[str]]:
@@ -382,7 +411,10 @@ class ExecuteDagState(State):
     @staticmethod
     async def execute_node(current_node: DagNode, semaphore: asyncio.Semaphore) -> None:
         async with semaphore:
-
+            from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
+
+            executor = GriptapeNodes.FlowManager().node_executor
+            await executor.execute(current_node.node_reference)
 
     @staticmethod
     async def on_enter(context: ParallelResolutionContext) -> type[State] | None:
@@ -472,11 +504,29 @@ class ExecuteDagState(State):
             done, _ = await asyncio.wait(context.task_to_node.keys(), return_when=asyncio.FIRST_COMPLETED)
             # Check for task exceptions and handle them properly
             for task in done:
+                if task.cancelled():
+                    # Task was cancelled - this is expected during flow cancellation
+                    context.task_to_node.pop(task)
+                    logger.info("Task execution was cancelled.")
+                    return ErrorState
                 if task.exception():
-                    # Get the actual exception and re-raise it
                     exc = task.exception()
+                    dag_node = context.task_to_node.get(task)
+                    node_name = dag_node.node_reference.name if dag_node else "Unknown"
+                    node_type = dag_node.node_reference.__class__.__name__ if dag_node else "Unknown"
+
+                    logger.exception(
+                        "Task execution failed for node '%s' (type: %s) in flow '%s'. Exception: %s",
+                        node_name,
+                        node_type,
+                        context.flow_name,
+                        exc,
+                    )
+
                     context.task_to_node.pop(task)
-
+                    context.error_message = f"Task execution failed for node '{node_name}': {exc}"
+                    context.workflow_state = WorkflowState.ERRORED
+                    return ErrorState
                 context.task_to_node.pop(task)
                 # Once a task has finished, loop back to the top.
                 await ExecuteDagState.pop_done_states(context)
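Note: the reworked completion loop above now distinguishes cancelled tasks from failed ones before popping them, and records the failing node's name and type instead of re-raising. A minimal sketch of that asyncio pattern with invented names (not the griptape_nodes API); `task.cancelled()` is checked first because calling `task.exception()` on a cancelled task raises `CancelledError`:

    import asyncio

    async def flaky(name: str) -> str:
        # Stand-in for a node execution; "bad" simulates a failing node.
        if name == "bad":
            raise RuntimeError("boom")
        await asyncio.sleep(0.01)
        return name

    async def drain() -> None:
        tasks = {asyncio.create_task(flaky(n)): n for n in ("good", "bad")}
        while tasks:
            done, _ = await asyncio.wait(tasks.keys(), return_when=asyncio.FIRST_COMPLETED)
            for task in done:
                name = tasks.pop(task)
                if task.cancelled():
                    print(f"{name}: cancelled")  # expected during flow cancellation
                elif task.exception() is not None:
                    print(f"{name}: failed: {task.exception()}")  # record the error instead of re-raising
                else:
                    print(f"{name}: ok -> {task.result()}")

    asyncio.run(drain())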
griptape_nodes/machines/parallel_resolution.py (continued):

@@ -563,6 +613,21 @@ class ParallelResolutionMachine(FSM[ParallelResolutionContext]):
         self.context.dag_builder = GriptapeNodes.FlowManager().global_dag_builder
         await self.start(ExecuteDagState)
 
+    async def cancel_all_nodes(self) -> None:
+        """Cancel all executing tasks and set cancellation flags on all nodes."""
+        # Set cancellation flag on all nodes being tracked
+        for dag_node in self.context.node_to_reference.values():
+            dag_node.node_reference.request_cancellation()
+
+        # Cancel all running tasks
+        tasks = list(self.context.task_to_node.keys())
+        for task in tasks:
+            if not task.done():
+                task.cancel()
+
+        # Wait for all tasks to complete
+        await asyncio.gather(*tasks, return_exceptions=True)
+
     def change_debug_mode(self, *, debug_mode: bool) -> None:
         self._context.paused = debug_mode
 
@@ -575,3 +640,7 @@ class ParallelResolutionMachine(FSM[ParallelResolutionContext]):
     def reset_machine(self, *, cancel: bool = False) -> None:
         self._context.reset(cancel=cancel)
         self._current_state = None
+
+    def get_last_resolved_node(self) -> BaseNode | None:
+        """Get the last node that was resolved in the DAG execution."""
+        return self._context.last_resolved_node
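Note: `cancel_all_nodes` combines a cooperative flag (`request_cancellation()` on each node) with hard `task.cancel()` calls, then gathers with `return_exceptions=True` so the shutdown itself never raises. A minimal, self-contained sketch of that shutdown pattern (the worker is invented; it is not the griptape_nodes executor):

    import asyncio

    async def worker(i: int) -> None:
        await asyncio.sleep(10)  # stands in for a long-running node execution

    async def cancel_all(tasks: list[asyncio.Task]) -> None:
        for task in tasks:
            if not task.done():
                task.cancel()
        # return_exceptions=True swallows each task's CancelledError, so shutdown cannot fail.
        await asyncio.gather(*tasks, return_exceptions=True)

    async def main() -> None:
        tasks = [asyncio.create_task(worker(i)) for i in range(3)]
        await asyncio.sleep(0.01)
        await cancel_all(tasks)
        print([t.cancelled() for t in tasks])  # [True, True, True]

    asyncio.run(main())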
griptape_nodes/machines/sequential_resolution.py:

@@ -1,9 +1,11 @@
 from __future__ import annotations
 
+import asyncio
 import logging
 from dataclasses import dataclass
 
-from griptape_nodes.exe_types.
+from griptape_nodes.exe_types.connections import Direction
+from griptape_nodes.exe_types.core_types import ParameterTypeBuiltin
 from griptape_nodes.exe_types.node_types import BaseNode, NodeResolutionState
 from griptape_nodes.exe_types.type_validator import TypeValidator
 from griptape_nodes.machines.fsm import FSM, State
@@ -22,6 +24,7 @@ from griptape_nodes.retained_mode.events.execution_events import (
 )
 from griptape_nodes.retained_mode.events.parameter_events import (
     SetParameterValueRequest,
+    SetParameterValueResultFailure,
 )
 from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
 
@@ -31,6 +34,7 @@ logger = logging.getLogger("griptape_nodes")
 @dataclass
 class Focus:
     node: BaseNode
+    task: asyncio.Task[None] | None = None
 
 
 # This is on a per-node basis
@@ -163,12 +167,8 @@ class ExecuteNodeState(State):
         connections = GriptapeNodes.FlowManager().get_connections()
 
         for parameter in current_node.parameters:
-            # Skip control type parameters
-            if ParameterTypeBuiltin.CONTROL_TYPE.value.lower() == parameter.output_type:
-                continue
-
             # Get the connected upstream node for this parameter
-            upstream_connection = connections.get_connected_node(current_node, parameter)
+            upstream_connection = connections.get_connected_node(current_node, parameter, direction=Direction.UPSTREAM)
             if upstream_connection:
                 upstream_node, upstream_parameter = upstream_connection
 
@@ -179,21 +179,20 @@ class ExecuteNodeState(State):
                 output_value = upstream_node.get_parameter_value(upstream_parameter.name)
 
                 # Pass the value through using the same mechanism as normal resolution
-
-
-
-
-
-
-
-
-                    node_name=current_node.name,
-                    value=output_value,
-                    data_type=upstream_parameter.output_type,
-                    incoming_connection_source_node_name=upstream_node.name,
-                    incoming_connection_source_parameter_name=upstream_parameter.name,
-                )
+                result = GriptapeNodes.get_instance().handle_request(
+                    SetParameterValueRequest(
+                        parameter_name=parameter.name,
+                        node_name=current_node.name,
+                        value=output_value,
+                        data_type=upstream_parameter.output_type,
+                        incoming_connection_source_node_name=upstream_node.name,
+                        incoming_connection_source_parameter_name=upstream_parameter.name,
                     )
+                )
+                if isinstance(result, SetParameterValueResultFailure):
+                    msg = f"Failed to set parameter value for node '{current_node.name}' and parameter '{parameter.name}'. Details: {result.result_details}"
+                    logger.error(msg)
+                    raise RuntimeError(msg)
 
     @staticmethod
     async def on_enter(context: ResolutionContext) -> type[State] | None:
@@ -270,7 +269,26 @@ class ExecuteNodeState(State):
         current_node = current_focus.node
 
         try:
-
+            from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
+
+            executor = GriptapeNodes.FlowManager().node_executor
+            # Create and track task in Focus for cancellation support
+            execution_task = asyncio.create_task(executor.execute(current_node))
+            current_focus.task = execution_task
+            await execution_task
+        except asyncio.CancelledError:
+            logger.info("Node '%s' processing was cancelled.", current_node.name)
+            current_node.make_node_unresolved(
+                current_states_to_trigger_change_event=set(
+                    {NodeResolutionState.UNRESOLVED, NodeResolutionState.RESOLVED, NodeResolutionState.RESOLVING}
+                )
+            )
+            GriptapeNodes.EventManager().put_event(
+                ExecutionGriptapeNodeEvent(
+                    wrapped_event=ExecutionEvent(payload=NodeFinishProcessEvent(node_name=current_node.name))
+                )
+            )
+            return CompleteState
         except Exception as e:
             logger.exception("Error processing node '%s", current_node.name)
             msg = f"Canceling flow run. Node '{current_node.name}' encountered a problem: {e}"
@@ -283,7 +301,7 @@ class ExecuteNodeState(State):
 
         from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
 
-        GriptapeNodes.FlowManager().cancel_flow_run()
+        await GriptapeNodes.FlowManager().cancel_flow_run()
 
         GriptapeNodes.EventManager().put_event(
             ExecutionGriptapeNodeEvent(
@@ -381,6 +399,27 @@ class SequentialResolutionMachine(FSM[ResolutionContext]):
         self._context.focus_stack.append(Focus(node=node))
         await self.start(InitializeSpotlightState)
 
+    async def cancel_all_nodes(self) -> None:
+        """Cancel the currently executing node and set cancellation flags on all nodes in focus stack."""
+        # Set cancellation flag on all nodes in the focus stack
+        for focus in self._context.focus_stack:
+            focus.node.request_cancellation()
+
+        # Collect tasks that need to be cancelled
+        tasks = []
+        if self._context.focus_stack:
+            current_focus = self._context.focus_stack[-1]
+            if current_focus.task and not current_focus.task.done():
+                tasks.append(current_focus.task)
+
+        # Cancel all tasks
+        for task in tasks:
+            task.cancel()
+
+        # Wait for all tasks to complete
+        if tasks:
+            await asyncio.gather(*tasks, return_exceptions=True)
+
     def change_debug_mode(self, debug_mode: bool) -> None:  # noqa: FBT001
         self._context.paused = debug_mode
 
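Note: the sequential machine now keeps the `asyncio.Task` for the executing node on its `Focus`, which is what lets `cancel_all_nodes` cancel it and lets the run loop turn `CancelledError` into an unresolved-node state. A small sketch of that task-tracking pattern, assuming a toy `Focus` and a sleep standing in for the real node executor:

    from __future__ import annotations

    import asyncio
    from dataclasses import dataclass

    @dataclass
    class Focus:
        name: str
        task: asyncio.Task[None] | None = None

    async def run_node(focus: Focus) -> None:
        # Stand-in for executor.execute(node); keep a handle so it can be cancelled externally.
        focus.task = asyncio.create_task(asyncio.sleep(10))
        try:
            await focus.task
        except asyncio.CancelledError:
            print(f"node '{focus.name}' was cancelled; marking it unresolved")

    async def main() -> None:
        focus = Focus(name="generate_image")
        runner = asyncio.create_task(run_node(focus))
        await asyncio.sleep(0.01)
        if focus.task and not focus.task.done():
            focus.task.cancel()
        await runner

    asyncio.run(main())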
griptape_nodes/node_library/library_registry.py:

@@ -50,6 +50,13 @@ class IconVariant(BaseModel):
     dark: str
 
 
+class NodeDeprecationMetadata(BaseModel):
+    """Metadata about a deprecated node."""
+
+    deprecation_message: str | None = None
+    removal_version: str | None = None
+
+
 class NodeMetadata(BaseModel):
     """Metadata about each node within the library, which informs where in the hierarchy it sits, details on usage, and tags to assist search."""
 
@@ -60,6 +67,7 @@ class NodeMetadata(BaseModel):
     icon: str | IconVariant | None = None
     color: str | None = None
     group: str | None = None
+    deprecation: NodeDeprecationMetadata | None = None
 
 
 class CategoryDefinition(BaseModel):
@@ -99,7 +107,7 @@ class LibrarySchema(BaseModel):
     library itself.
     """
 
-    LATEST_SCHEMA_VERSION: ClassVar[str] = "0.
+    LATEST_SCHEMA_VERSION: ClassVar[str] = "0.3.0"
 
     name: str
     library_schema_version: str
@@ -359,3 +367,18 @@ class Library:
             The AdvancedNodeLibrary instance, or None if not set
         """
         return self._advanced_library
+
+    def get_nodes_by_base_type(self, base_type: type) -> list[str]:
+        """Get all node types in this library that are subclasses of the specified base type.
+
+        Args:
+            base_type: The base class to filter by (e.g., StartNode, ControlNode)
+
+        Returns:
+            List of node type names that extend the base type
+        """
+        matching_nodes = []
+        for node_type, node_class in self._node_types.items():
+            if issubclass(node_class, base_type):
+                matching_nodes.append(node_type)
+        return matching_nodes
griptape_nodes/node_library/workflow_registry.py:

@@ -4,7 +4,7 @@ import json
 import logging
 from datetime import datetime  # noqa: TC003 (can't put into type checking block as Pydantic model relies on it)
 from pathlib import Path
-from typing import Any, ClassVar
+from typing import Any, ClassVar, NamedTuple
 
 from pydantic import BaseModel, Field, field_serializer, field_validator
 
@@ -15,6 +15,12 @@ from griptape_nodes.utils.metaclasses import SingletonMeta
 
 logger = logging.getLogger("griptape_nodes")
 
+
+class LibraryNameAndNodeType(NamedTuple):
+    library_name: str
+    node_type: str
+
+
 # Type aliases for clarity
 type NodeName = str
 type ParameterName = str
@@ -39,12 +45,13 @@ class WorkflowShape(BaseModel):
 
 
 class WorkflowMetadata(BaseModel):
-    LATEST_SCHEMA_VERSION: ClassVar[str] = "0.
+    LATEST_SCHEMA_VERSION: ClassVar[str] = "0.9.0"
 
     name: str
     schema_version: str
     engine_version_created_with: str
     node_libraries_referenced: list[LibraryNameAndVersion]
+    node_types_used: set[LibraryNameAndNodeType] = Field(default_factory=set)
     workflows_referenced: list[str] | None = None
     description: str | None = None
     image: str | None = None
@@ -55,6 +62,35 @@ class WorkflowMetadata(BaseModel):
     branched_from: str | None = Field(default=None)
     workflow_shape: WorkflowShape | None = Field(default=None)
 
+    @field_serializer("node_types_used")
+    def serialize_node_types_used(self, node_types_used: set[LibraryNameAndNodeType]) -> list[list[str]]:
+        """Serialize node_types_used as list of tuples for TOML compatibility.
+
+        Sets and NamedTuples are not directly supported by TOML, so we convert the set
+        to a list of lists (each inner list represents [library_name, node_type]).
+        """
+        return [[nt.library_name, nt.node_type] for nt in sorted(node_types_used)]
+
+    @field_validator("node_types_used", mode="before")
+    @classmethod
+    def validate_node_types_used(cls, value: Any) -> set[LibraryNameAndNodeType]:
+        """Deserialize node_types_used from list of lists during TOML loading.
+
+        When loading workflow metadata from TOML files, the node_types_used field
+        is stored as a list of [library_name, node_type] pairs that needs to be
+        converted back to a set of LibraryNameAndNodeType objects. This validator
+        handles the expected input formats:
+        - List of lists (from TOML deserialization)
+        - Set of LibraryNameAndNodeType (from direct Python construction)
+        - Empty list (for workflows with no nodes)
+        """
+        if isinstance(value, set):
+            return value
+        if isinstance(value, list):
+            return {LibraryNameAndNodeType(library_name=item[0], node_type=item[1]) for item in value}
+        msg = f"Expected list or set for node_types_used, got {type(value)}"
+        raise ValueError(msg)
+
     @field_serializer("workflow_shape")
     def serialize_workflow_shape(self, workflow_shape: WorkflowShape | None) -> str | None:
         """Serialize WorkflowShape as JSON string to avoid TOML serialization issues.
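Note: `node_types_used` lives in memory as a set of NamedTuples but is written out as a sorted list of `[library_name, node_type]` pairs, because TOML has no set or tuple type. A self-contained round-trip sketch of that technique using Pydantic v2's `field_serializer`/`field_validator` on a toy model (not the real `WorkflowMetadata`):

    from typing import Any, NamedTuple

    from pydantic import BaseModel, Field, field_serializer, field_validator

    class LibraryNameAndNodeType(NamedTuple):
        library_name: str
        node_type: str

    class Metadata(BaseModel):
        node_types_used: set[LibraryNameAndNodeType] = Field(default_factory=set)

        @field_serializer("node_types_used")
        def _ser(self, value: set[LibraryNameAndNodeType]) -> list[list[str]]:
            # Sorted list of [library, node_type] pairs: deterministic output, TOML-friendly.
            return [[nt.library_name, nt.node_type] for nt in sorted(value)]

        @field_validator("node_types_used", mode="before")
        @classmethod
        def _val(cls, value: Any) -> set[LibraryNameAndNodeType]:
            if isinstance(value, set):
                return value
            return {LibraryNameAndNodeType(*item) for item in value}

    m = Metadata(node_types_used={LibraryNameAndNodeType("Griptape Nodes Library", "Agent")})
    dumped = m.model_dump()  # {'node_types_used': [['Griptape Nodes Library', 'Agent']]}
    print(Metadata.model_validate(dumped) == m)  # True: the list form round-trips back to the set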
griptape_nodes/retained_mode/events/execution_events.py:

@@ -1,4 +1,4 @@
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from typing import Any
 
 from griptape_nodes.retained_mode.events.base_events import (
@@ -10,6 +10,7 @@ from griptape_nodes.retained_mode.events.base_events import (
     WorkflowAlteredMixin,
     WorkflowNotAlteredMixin,
 )
+from griptape_nodes.retained_mode.events.node_events import SerializedNodeCommands
 from griptape_nodes.retained_mode.events.payload_registry import PayloadRegistry
 
 # Requests and Results TO/FROM USER! These begin requests - and are not fully Execution Events.
@@ -72,6 +73,8 @@ class StartFlowRequest(RequestPayload):
     flow_name: str | None = None
     flow_node_name: str | None = None
     debug_mode: bool = False
+    # If this is true, the final ControlFLowResolvedEvent will be pickled to be picked up from inside a subprocess.
+    pickle_control_flow_result: bool = False
 
 
 @dataclass
@@ -303,6 +306,10 @@ class ParameterSpotlightEvent(ExecutionPayload):
 class ControlFlowResolvedEvent(ExecutionPayload):
     end_node_name: str
     parameter_output_values: dict
+    # Optional field for pickled parameter values - when present, parameter_output_values contains UUID references
+    unique_parameter_uuid_to_values: dict[SerializedNodeCommands.UniqueParameterValueUUID, Any] | None = field(
+        default=None
+    )
 
 
 @dataclass
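Note: `ControlFlowResolvedEvent` can now carry a side table of pickled parameter values keyed by UUID, with `parameter_output_values` holding only the references; paired with `StartFlowRequest.pickle_control_flow_result`, this is what lets a subprocess hand rich result values back to the parent process. A rough sketch of that indirection with invented helper names (the actual encoding used by griptape_nodes may differ):

    import pickle
    import uuid
    from typing import Any

    def pack(parameter_output_values: dict[str, Any]) -> tuple[dict[str, str], dict[str, bytes]]:
        # Replace each value with a UUID reference and keep the pickled bytes in a side table.
        refs: dict[str, str] = {}
        table: dict[str, bytes] = {}
        for name, value in parameter_output_values.items():
            key = str(uuid.uuid4())
            refs[name] = key
            table[key] = pickle.dumps(value)
        return refs, table

    def unpack(refs: dict[str, str], table: dict[str, bytes]) -> dict[str, Any]:
        return {name: pickle.loads(table[key]) for name, key in refs.items()}

    refs, table = pack({"output_image": {"width": 512, "height": 512}})
    print(unpack(refs, table))  # {'output_image': {'width': 512, 'height': 512}}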
griptape_nodes/retained_mode/events/flow_events.py:

@@ -1,8 +1,8 @@
-from dataclasses import dataclass
-from typing import Any
+from dataclasses import dataclass, field
+from typing import Any, NamedTuple
 
 from griptape_nodes.exe_types.node_types import NodeDependencies
-from griptape_nodes.node_library.workflow_registry import WorkflowShape
+from griptape_nodes.node_library.workflow_registry import LibraryNameAndNodeType, WorkflowShape
 from griptape_nodes.retained_mode.events.base_events import (
     RequestPayload,
     ResultPayloadFailure,
@@ -201,6 +201,8 @@ class SerializedFlowCommands:
         node_dependencies (NodeDependencies): Aggregated dependencies from all nodes in this flow and its sub-flows.
             Includes referenced workflows, static files, Python imports, and libraries. Used for workflow packaging,
             dependency resolution, and deployment planning.
+        node_types_used (set[LibraryNameAndNodeType]): Set of all node types used in this flow and its sub-flows.
+            Each entry contains the library name and node type name pair, used for tracking which node types are utilized.
     """
 
     @dataclass
@@ -231,6 +233,7 @@ class SerializedFlowCommands:
     set_lock_commands_per_node: dict[SerializedNodeCommands.NodeUUID, SetLockNodeStateRequest]
     sub_flows_commands: list["SerializedFlowCommands"]
     node_dependencies: NodeDependencies
+    node_types_used: set[LibraryNameAndNodeType]
 
 
 @dataclass
@@ -438,3 +441,87 @@ class PackageNodeAsSerializedFlowResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
     Common causes: node not found, no current context, serialization error,
     connection analysis failed, node has no valid flow context.
     """
+
+
+# Type aliases for parameter mapping clarity
+SanitizedParameterName = str  # What appears in the serialized flow
+OriginalNodeName = str  # Original node name (can have spaces, dots, etc.)
+OriginalParameterName = str  # Original parameter name
+
+
+class OriginalNodeParameter(NamedTuple):
+    """Represents the original source of a parameter before sanitization."""
+
+    node_name: OriginalNodeName
+    parameter_name: OriginalParameterName
+
+
+class ParameterNameMapping(NamedTuple):
+    """Maps a sanitized parameter name back to its original node and parameter."""
+
+    output_sanitized_parameter_name: SanitizedParameterName
+    original: OriginalNodeParameter
+
+
+@dataclass
+@PayloadRegistry.register
+class PackageNodesAsSerializedFlowRequest(RequestPayload):
+    """Package multiple nodes as a complete flow with artificial start and end nodes.
+
+    Creates a serialized flow where:
+    - Start node has output parameters matching all selected nodes' incoming connections
+    - All selected nodes maintain their existing connections between each other
+    - End node has input parameters matching all selected nodes' outgoing connections
+    - Flow structure: Start → [Selected Nodes with internal connections] → End
+
+    Use when: Creating complex reusable components, exporting node groups for templates,
+    building multi-step sub-workflows, packaging interconnected functionality.
+
+    Args:
+        node_names: List of node names to package as a flow (empty list will create StartFlow→EndFlow only with warning)
+        start_node_type: Node type name for the artificial start node (defaults to "StartFlow")
+        end_node_type: Node type name for the artificial end node (defaults to "EndFlow")
+        start_end_specific_library_name: Library name containing the start/end nodes (defaults to "Griptape Nodes Library")
+        entry_control_node_name: Name of the node that should receive the control flow entry (required if entry_control_parameter_name specified)
+        entry_control_parameter_name: Name of the control parameter on the entry node (None for auto-detection of first available control parameter)
+        output_parameter_prefix: Prefix for parameter names on the generated end node to avoid collisions (defaults to "packaged_node_")
+
+    Results: PackageNodesAsSerializedFlowResultSuccess (with serialized flow and node name mapping) | PackageNodesAsSerializedFlowResultFailure
+    """
+
+    # List of node names to package (empty list creates StartFlow→EndFlow only with warning)
+    node_names: list[str] = field(default_factory=list)
+    start_node_type: str = "StartFlow"
+    end_node_type: str = "EndFlow"
+    start_end_specific_library_name: str = "Griptape Nodes Library"
+    entry_control_node_name: str | None = None
+    entry_control_parameter_name: str | None = None
+    output_parameter_prefix: str = "packaged_node_"
+
+
+@dataclass
+@PayloadRegistry.register
+class PackageNodesAsSerializedFlowResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
+    """Multiple nodes successfully packaged as serialized flow.
+
+    Args:
+        serialized_flow_commands: The complete serialized flow with StartFlow, selected nodes with preserved connections, and EndFlow
+        workflow_shape: The workflow shape defining inputs and outputs for external callers
+        packaged_node_names: List of node names that were included in the package
+        parameter_name_mappings: Dict mapping sanitized parameter names to original node and parameter names for O(1) lookup
+    """
+
+    serialized_flow_commands: SerializedFlowCommands
+    workflow_shape: WorkflowShape
+    packaged_node_names: list[str]
+    parameter_name_mappings: dict[SanitizedParameterName, OriginalNodeParameter]
+
+
+@dataclass
+@PayloadRegistry.register
+class PackageNodesAsSerializedFlowResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
+    """Multiple nodes packaging failed.
+
+    Common causes: one or more nodes not found, no current context, serialization error,
+    entry control node/parameter not found, connection analysis failed.
+    """