griptape-nodes 0.57.1__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. griptape_nodes/api_client/__init__.py +9 -0
  2. griptape_nodes/api_client/client.py +279 -0
  3. griptape_nodes/api_client/request_client.py +273 -0
  4. griptape_nodes/app/app.py +57 -150
  5. griptape_nodes/bootstrap/utils/python_subprocess_executor.py +1 -1
  6. griptape_nodes/bootstrap/workflow_executors/local_session_workflow_executor.py +22 -50
  7. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +6 -1
  8. griptape_nodes/bootstrap/workflow_executors/subprocess_workflow_executor.py +27 -46
  9. griptape_nodes/bootstrap/workflow_executors/utils/subprocess_script.py +7 -0
  10. griptape_nodes/bootstrap/workflow_publishers/local_workflow_publisher.py +3 -1
  11. griptape_nodes/bootstrap/workflow_publishers/subprocess_workflow_publisher.py +3 -1
  12. griptape_nodes/bootstrap/workflow_publishers/utils/subprocess_script.py +16 -1
  13. griptape_nodes/common/node_executor.py +466 -0
  14. griptape_nodes/drivers/storage/base_storage_driver.py +0 -11
  15. griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py +7 -25
  16. griptape_nodes/drivers/storage/local_storage_driver.py +2 -2
  17. griptape_nodes/exe_types/connections.py +37 -9
  18. griptape_nodes/exe_types/core_types.py +1 -1
  19. griptape_nodes/exe_types/node_types.py +115 -22
  20. griptape_nodes/machines/control_flow.py +48 -7
  21. griptape_nodes/machines/parallel_resolution.py +98 -29
  22. griptape_nodes/machines/sequential_resolution.py +61 -22
  23. griptape_nodes/node_library/library_registry.py +24 -1
  24. griptape_nodes/node_library/workflow_registry.py +38 -2
  25. griptape_nodes/retained_mode/events/execution_events.py +8 -1
  26. griptape_nodes/retained_mode/events/flow_events.py +90 -3
  27. griptape_nodes/retained_mode/events/node_events.py +17 -10
  28. griptape_nodes/retained_mode/events/workflow_events.py +5 -0
  29. griptape_nodes/retained_mode/griptape_nodes.py +16 -219
  30. griptape_nodes/retained_mode/managers/config_manager.py +0 -46
  31. griptape_nodes/retained_mode/managers/engine_identity_manager.py +225 -74
  32. griptape_nodes/retained_mode/managers/flow_manager.py +1276 -230
  33. griptape_nodes/retained_mode/managers/library_manager.py +7 -8
  34. griptape_nodes/retained_mode/managers/node_manager.py +197 -9
  35. griptape_nodes/retained_mode/managers/secrets_manager.py +26 -0
  36. griptape_nodes/retained_mode/managers/session_manager.py +264 -227
  37. griptape_nodes/retained_mode/managers/settings.py +4 -38
  38. griptape_nodes/retained_mode/managers/static_files_manager.py +3 -3
  39. griptape_nodes/retained_mode/managers/version_compatibility_manager.py +135 -6
  40. griptape_nodes/retained_mode/managers/workflow_manager.py +206 -78
  41. griptape_nodes/servers/mcp.py +23 -15
  42. griptape_nodes/utils/async_utils.py +36 -0
  43. griptape_nodes/utils/dict_utils.py +8 -2
  44. griptape_nodes/version_compatibility/versions/v0_39_0/modified_parameters_set_removal.py +11 -6
  45. griptape_nodes/version_compatibility/workflow_versions/v0_7_0/local_executor_argument_addition.py +12 -5
  46. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/METADATA +4 -3
  47. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/RECORD +49 -47
  48. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/WHEEL +1 -1
  49. griptape_nodes/retained_mode/utils/engine_identity.py +0 -245
  50. griptape_nodes/servers/ws_request_manager.py +0 -268
  51. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/entry_points.txt +0 -0
@@ -12,6 +12,7 @@ from inspect import getmodule, isclass
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, TypeVar, cast
 
+import semver
 import tomlkit
 from rich.box import HEAVY_EDGE
 from rich.console import Console
@@ -23,7 +24,12 @@ from griptape_nodes.drivers.storage import StorageBackend
 from griptape_nodes.exe_types.core_types import ParameterTypeBuiltin
 from griptape_nodes.exe_types.flow import ControlFlow
 from griptape_nodes.exe_types.node_types import BaseNode, EndNode, StartNode
-from griptape_nodes.node_library.workflow_registry import Workflow, WorkflowMetadata, WorkflowRegistry, WorkflowShape
+from griptape_nodes.node_library.workflow_registry import (
+    Workflow,
+    WorkflowMetadata,
+    WorkflowRegistry,
+    WorkflowShape,
+)
 from griptape_nodes.retained_mode.events.app_events import (
     GetEngineVersionRequest,
     GetEngineVersionResultSuccess,
@@ -108,7 +114,6 @@ from griptape_nodes.retained_mode.events.workflow_events import (
 )
 from griptape_nodes.retained_mode.griptape_nodes import (
     GriptapeNodes,
-    Version,
 )
 from griptape_nodes.retained_mode.managers.os_manager import OSManager
 
@@ -233,6 +238,25 @@ class WorkflowManager:
         execution_successful: bool
         execution_details: str
 
+    class SaveWorkflowScenario(StrEnum):
+        """Scenarios for saving workflows."""
+
+        FIRST_SAVE = "first_save"  # First save of new workflow
+        OVERWRITE_EXISTING = "overwrite_existing"  # Save existing workflow to same name
+        SAVE_AS = "save_as"  # Save existing workflow with new name
+        SAVE_FROM_TEMPLATE = "save_from_template"  # Save from a template
+
+    @dataclass
+    class SaveWorkflowTargetInfo:
+        """Target information for saving a workflow."""
+
+        scenario: WorkflowManager.SaveWorkflowScenario  # Which save scenario we're in
+        file_name: str  # Final resolved name to use
+        file_path: Path  # Absolute path where file will be written
+        relative_file_path: str  # Relative path for registry
+        creation_date: datetime  # When workflow was originally created
+        branched_from: str | None  # Workflow this was branched from (if any)
+
     def __init__(self, event_manager: EventManager) -> None:
         self._workflow_file_path_to_info = {}
         self._squelch_workflow_altered_count = 0
@@ -929,8 +953,9 @@ class WorkflowManager:
         for node_library_referenced in workflow_metadata.node_libraries_referenced:
             library_name = node_library_referenced.library_name
             desired_version_str = node_library_referenced.library_version
-            desired_version = Version.from_string(desired_version_str)
-            if desired_version is None:
+            try:
+                desired_version = semver.VersionInfo.parse(desired_version_str)
+            except Exception:
                 had_critical_error = True
                 problems.append(
                     f"Workflow cited an invalid version string '{desired_version_str}' for library '{library_name}'. Must be specified in major.minor.patch format."
@@ -974,8 +999,9 @@ class WorkflowManager:
             # Attempt to parse out the version string.
             library_metadata = library_metadata_result.metadata
             library_version_str = library_metadata.library_version
-            library_version = Version.from_string(version_string=library_version_str)
-            if library_version is None:
+            try:
+                library_version = semver.VersionInfo.parse(library_version_str)
+            except Exception:
                 had_critical_error = True
                 problems.append(
                     f"Library an invalid version string '{library_version_str}' for library '{library_name}'. Must be specified in major.minor.patch format."
@@ -1166,67 +1192,37 @@ class WorkflowManager:
 
         return self.WriteWorkflowFileResult(success=True, error_details="")
 
-    def on_save_workflow_request(self, request: SaveWorkflowRequest) -> ResultPayload:  # noqa: C901, PLR0912, PLR0915
-        # Start with the file name provided; we may change it.
-        file_name = request.file_name
-
-        # See if we had an existing workflow for this.
-        prior_workflow = None
-        creation_date = None
-        if file_name and WorkflowRegistry.has_workflow_with_name(file_name):
-            # Get the metadata.
-            prior_workflow = WorkflowRegistry.get_workflow_by_name(file_name)
-            # We'll use its creation date.
-            creation_date = prior_workflow.metadata.creation_date
-        elif file_name:
-            # If no prior workflow exists for the new name, check if there's a current workflow
-            # context (e.g., during rename operations) to preserve metadata from
-            context_manager = GriptapeNodes.ContextManager()
-            if context_manager.has_current_workflow():
-                current_workflow_name = context_manager.get_current_workflow_name()
-                if current_workflow_name and WorkflowRegistry.has_workflow_with_name(current_workflow_name):
-                    prior_workflow = WorkflowRegistry.get_workflow_by_name(current_workflow_name)
-                    creation_date = prior_workflow.metadata.creation_date
+    def on_save_workflow_request(self, request: SaveWorkflowRequest) -> ResultPayload:
+        # Determine save target (file path, name, metadata)
+        context_manager = GriptapeNodes.ContextManager()
+        current_workflow_name = (
+            context_manager.get_current_workflow_name() if context_manager.has_current_workflow() else None
+        )
 
-        if (creation_date is None) or (creation_date == WorkflowManager.EPOCH_START):
-            # Either a new workflow, or a backcompat situation.
-            creation_date = datetime.now(tz=UTC)
+        try:
+            save_target = self._determine_save_target(
+                requested_file_name=request.file_name,
+                current_workflow_name=current_workflow_name,
+            )
+        except ValueError as e:
+            details = f"Attempted to save workflow. Failed when determining save target: {e}"
+            return SaveWorkflowResultFailure(result_details=details)
 
-        # Let's see if this is a template file; if so, re-route it as a copy in the customer's workflow directory.
-        if prior_workflow and prior_workflow.metadata.is_template:
-            # Aha! User is attempting to save a template. Create a differently-named file in their workspace.
-            # Find the first available file name that doesn't conflict.
-            curr_idx = 1
-            free_file_found = False
-            while not free_file_found:
-                # Composite a new candidate file name to test.
-                new_file_name = f"{file_name}_{curr_idx}"
-                new_file_name_with_extension = f"{new_file_name}.py"
-                new_file_full_path = GriptapeNodes.ConfigManager().workspace_path.joinpath(new_file_name_with_extension)
-                if new_file_full_path.exists():
-                    # Keep going.
-                    curr_idx += 1
-                else:
-                    free_file_found = True
-                    file_name = new_file_name
-
-        # Get file name stuff prepped.
-        # Use the existing registered file path if this is an existing workflow (not a template)
-        if prior_workflow and not prior_workflow.metadata.is_template:
-            # Use the existing registered file path
-            relative_file_path = prior_workflow.file_path
-            file_path = Path(WorkflowRegistry.get_complete_file_path(relative_file_path))
-            # Extract file name from the path for metadata generation
-            if not file_name:
-                file_name = prior_workflow.metadata.name
-        else:
-            # Create new path in workspace for new workflows or templates
-            if not file_name:
-                file_name = datetime.now(tz=UTC).strftime("%d.%m_%H.%M")
-            relative_file_path = f"{file_name}.py"
-            file_path = GriptapeNodes.ConfigManager().workspace_path.joinpath(relative_file_path)
+        file_name = save_target.file_name
+        file_path = save_target.file_path
+        relative_file_path = save_target.relative_file_path
+        creation_date = save_target.creation_date
+        branched_from = save_target.branched_from
+
+        logger.info(
+            "Save workflow: scenario=%s, file_name=%s, file_path=%s, branched_from=%s",
+            save_target.scenario.value,
+            file_name,
+            str(file_path),
+            branched_from if branched_from else "None",
+        )
 
-        # First, serialize the current workflow state
+        # Serialize the current workflow state
         top_level_flow_request = GetTopLevelFlowRequest()
         top_level_flow_result = GriptapeNodes.handle_request(top_level_flow_request)
         if not isinstance(top_level_flow_result, GetTopLevelFlowResultSuccess):
@@ -1244,11 +1240,6 @@ class WorkflowManager:
 
         serialized_flow_commands = serialized_flow_result.serialized_flow_commands
 
-        # Extract branched_from information if it exists
-        branched_from = None
-        if prior_workflow and prior_workflow.metadata.branched_from:
-            branched_from = prior_workflow.metadata.branched_from
-
 
         # Extract workflow shape if possible
  try:
@@ -1259,6 +1250,10 @@ class WorkflowManager:
             pass
 
         # Use the standalone request to save the workflow file
+        # Use pickle_control_flow_result from request if provided, otherwise use False (default)
+        pickle_control_flow_result = (
+            request.pickle_control_flow_result if request.pickle_control_flow_result is not None else False
+        )
         save_file_request = SaveWorkflowFileFromSerializedFlowRequest(
             serialized_flow_commands=serialized_flow_commands,
             file_name=file_name,
@@ -1268,6 +1263,7 @@ class WorkflowManager:
             branched_from=branched_from,
             workflow_shape=workflow_shape,
             file_path=str(file_path),
+            pickle_control_flow_result=pickle_control_flow_result,
         )
         save_file_result = self.on_save_workflow_file_from_serialized_flow_request(save_file_request)
 
@@ -1297,6 +1293,97 @@ class WorkflowManager:
             file_path=save_file_result.file_path, result_details=ResultDetails(message=details, level=logging.INFO)
         )
 
+    def _determine_save_target(
+        self, requested_file_name: str | None, current_workflow_name: str | None
+    ) -> SaveWorkflowTargetInfo:
+        """Determine the target file path, name, and metadata for saving a workflow.
+
+        Args:
+            requested_file_name: The name the user wants to save as (can be None)
+            current_workflow_name: The workflow currently loaded in context (can be None)
+
+        Returns:
+            SaveWorkflowTargetInfo with all information needed to save the workflow
+
+        Raises:
+            ValueError: If workflow registry lookups fail or produce inconsistent state
+        """
+        # Look up workflows in registry
+        target_workflow = None
+        if requested_file_name and WorkflowRegistry.has_workflow_with_name(requested_file_name):
+            target_workflow = WorkflowRegistry.get_workflow_by_name(requested_file_name)
+
+        current_workflow = None
+        if current_workflow_name and WorkflowRegistry.has_workflow_with_name(current_workflow_name):
+            current_workflow = WorkflowRegistry.get_workflow_by_name(current_workflow_name)
+
+        # Determine scenario and build target info
+        if (target_workflow and target_workflow.metadata.is_template) or (
+            current_workflow and current_workflow.metadata.is_template
+        ):
+            # Template workflows always create new copies with unique names
+            scenario = WorkflowManager.SaveWorkflowScenario.SAVE_FROM_TEMPLATE
+            template_workflow = target_workflow or current_workflow
+            if template_workflow is None:
+                msg = "Save From Template scenario requires either target_workflow or current_workflow to be present"
+                raise ValueError(msg)
+            base_name = requested_file_name if requested_file_name else template_workflow.metadata.name
+
+            # Find unique filename
+            curr_idx = 1
+            while True:
+                candidate_name = f"{base_name}_{curr_idx}"
+                candidate_path = GriptapeNodes.ConfigManager().workspace_path.joinpath(f"{candidate_name}.py")
+                if not candidate_path.exists():
+                    break
+                curr_idx += 1
+
+            file_name = candidate_name
+            creation_date = datetime.now(tz=UTC)
+            branched_from = None
+            relative_file_path = f"{file_name}.py"
+            file_path = GriptapeNodes.ConfigManager().workspace_path.joinpath(relative_file_path)
+
+        elif target_workflow:
+            # Requested name exists in registry → overwrite it
+            scenario = WorkflowManager.SaveWorkflowScenario.OVERWRITE_EXISTING
+            file_name = target_workflow.metadata.name
+            creation_date = target_workflow.metadata.creation_date
+            branched_from = target_workflow.metadata.branched_from
+            relative_file_path = target_workflow.file_path
+            file_path = Path(WorkflowRegistry.get_complete_file_path(relative_file_path))
+
+        elif requested_file_name and current_workflow:
+            # Requested name doesn't exist but we have a current workflow → Save As
+            scenario = WorkflowManager.SaveWorkflowScenario.SAVE_AS
+            file_name = requested_file_name
+            creation_date = current_workflow.metadata.creation_date
+            branched_from = current_workflow.metadata.branched_from
+            relative_file_path = f"{file_name}.py"
+            file_path = GriptapeNodes.ConfigManager().workspace_path.joinpath(relative_file_path)
+
+        else:
+            # No requested name or no current workflow → first save
+            scenario = WorkflowManager.SaveWorkflowScenario.FIRST_SAVE
+            file_name = requested_file_name if requested_file_name else datetime.now(tz=UTC).strftime("%d.%m_%H.%M")
+            creation_date = datetime.now(tz=UTC)
+            branched_from = None
+            relative_file_path = f"{file_name}.py"
+            file_path = GriptapeNodes.ConfigManager().workspace_path.joinpath(relative_file_path)
+
+        # Ensure creation date is valid (backcompat)
+        if (creation_date is None) or (creation_date == WorkflowManager.EPOCH_START):
+            creation_date = datetime.now(tz=UTC)
+
+        return WorkflowManager.SaveWorkflowTargetInfo(
+            scenario=scenario,
+            file_name=file_name,
+            file_path=file_path,
+            relative_file_path=relative_file_path,
+            creation_date=creation_date,
+            branched_from=branched_from,
+        )
+
     def on_save_workflow_file_from_serialized_flow_request(
         self, request: SaveWorkflowFileFromSerializedFlowRequest
     ) -> ResultPayload:
@@ -1340,6 +1427,7 @@ class WorkflowManager:
                 serialized_flow_commands=request.serialized_flow_commands,
                 workflow_metadata=workflow_metadata,
                 execution_flow_name=execution_flow_name,
+                pickle_control_flow_result=request.pickle_control_flow_result,
             )
         except Exception as err:
             details = f"Attempted to save workflow file '{request.file_name}' from serialized flow commands. Failed during content generation: {err}"
@@ -1387,6 +1475,7 @@ class WorkflowManager:
             schema_version=WorkflowMetadata.LATEST_SCHEMA_VERSION,
             engine_version_created_with=engine_version,
             node_libraries_referenced=list(serialized_flow_commands.node_dependencies.libraries),
+            node_types_used=serialized_flow_commands.node_types_used,
             workflows_referenced=workflows_referenced,
             creation_date=creation_date,
             last_modified_date=datetime.now(tz=UTC),
@@ -1400,6 +1489,8 @@ class WorkflowManager:
         serialized_flow_commands: SerializedFlowCommands,
         workflow_metadata: WorkflowMetadata,
         execution_flow_name: str,
+        *,
+        pickle_control_flow_result: bool = False,
     ) -> str:
         """Generate workflow file content from serialized commands and metadata."""
         metadata_block = self._generate_workflow_metadata_header(workflow_metadata=workflow_metadata)
@@ -1530,6 +1621,7 @@ class WorkflowManager:
             flow_name=execution_flow_name,
             import_recorder=import_recorder,
             workflow_metadata=workflow_metadata,
+            pickle_control_flow_result=pickle_control_flow_result,
         )
         if workflow_execution_code is not None:
             for node in workflow_execution_code:
@@ -1600,6 +1692,8 @@ class WorkflowManager:
         flow_name: str,
         import_recorder: ImportRecorder,
         workflow_metadata: WorkflowMetadata,
+        *,
+        pickle_control_flow_result: bool = False,
     ) -> list[ast.AST] | None:
         """Generates execute_workflow(...) and the __main__ guard."""
         # Use workflow shape from metadata if available, otherwise skip execution block
@@ -1625,7 +1719,7 @@ class WorkflowManager:
         )
         import_recorder.add_from_import("griptape_nodes.drivers.storage.storage_backend", "StorageBackend")
 
-        # === 1) build the `def execute_workflow(input: dict, storage_backend: str = StorageBackend.LOCAL, workflow_executor: WorkflowExecutor | None = None) -> dict | None:` ===
+        # === 1) build the `def execute_workflow(input: dict, storage_backend: str = StorageBackend.LOCAL, workflow_executor: WorkflowExecutor | None = None, pickle_control_flow_result: bool = False) -> dict | None:` ===
         # args
         arg_input = ast.arg(arg="input", annotation=ast.Name(id="dict", ctx=ast.Load()))
         arg_storage_backend = ast.arg(arg="storage_backend", annotation=ast.Name(id="str", ctx=ast.Load()))
@@ -1637,14 +1731,21 @@ class WorkflowManager:
                 right=ast.Constant(value=None),
             ),
         )
+        arg_pickle_control_flow_result = ast.arg(
+            arg="pickle_control_flow_result", annotation=ast.Name(id="bool", ctx=ast.Load())
+        )
         args = ast.arguments(
             posonlyargs=[],
-            args=[arg_input, arg_storage_backend, arg_workflow_executor],
+            args=[arg_input, arg_storage_backend, arg_workflow_executor, arg_pickle_control_flow_result],
             vararg=None,
             kwonlyargs=[],
             kw_defaults=[],
             kwarg=None,
-            defaults=[ast.Constant(StorageBackend.LOCAL.value), ast.Constant(value=None)],
+            defaults=[
+                ast.Constant(StorageBackend.LOCAL.value),
+                ast.Constant(value=None),
+                ast.Constant(value=pickle_control_flow_result),
+            ],
         )
         # return annotation: dict | None
         return_annotation = ast.BinOp(
@@ -1706,6 +1807,10 @@ class WorkflowManager:
                 keywords=[
                     ast.keyword(arg="workflow_name", value=ast.Constant(flow_name)),
                     ast.keyword(arg="flow_input", value=ast.Name(id="input", ctx=ast.Load())),
+                    ast.keyword(
+                        arg="pickle_control_flow_result",
+                        value=ast.Name(id="pickle_control_flow_result", ctx=ast.Load()),
+                    ),
                 ],
             )
         )
@@ -1755,6 +1860,10 @@ class WorkflowManager:
                                 ast.keyword(
                                     arg="workflow_executor", value=ast.Name(id="workflow_executor", ctx=ast.Load())
                                 ),
+                                ast.keyword(
+                                    arg="pickle_control_flow_result",
+                                    value=ast.Name(id="pickle_control_flow_result", ctx=ast.Load()),
+                                ),
                             ],
                         )
                     ],
@@ -2768,6 +2877,7 @@ class WorkflowManager:
     ) -> list[ast.stmt]:
         # Ensure necessary imports are recorded
         import_recorder.add_from_import("griptape_nodes.node_library.library_registry", "NodeMetadata")
+        import_recorder.add_from_import("griptape_nodes.node_library.library_registry", "NodeDeprecationMetadata")
         import_recorder.add_from_import("griptape_nodes.node_library.library_registry", "IconVariant")
         import_recorder.add_from_import("griptape_nodes.retained_mode.events.node_events", "CreateNodeRequest")
         import_recorder.add_from_import(
@@ -3167,7 +3277,10 @@ class WorkflowManager:
         return minimal_dict
 
     def _create_workflow_shape_from_nodes(
-        self, nodes: Sequence[BaseNode], workflow_shape: dict[str, Any], workflow_shape_type: str
+        self,
+        nodes: Sequence[BaseNode],
+        workflow_shape: dict[str, Any],
+        workflow_shape_type: str,
     ) -> dict[str, Any]:
         """Creates a workflow shape from the nodes.
 
@@ -3226,10 +3339,14 @@ class WorkflowManager:
 
         # Now, we need to gather the input and output parameters for each node type.
         workflow_shape = self._create_workflow_shape_from_nodes(
-            nodes=start_nodes, workflow_shape=workflow_shape, workflow_shape_type="input"
+            nodes=start_nodes,
+            workflow_shape=workflow_shape,
+            workflow_shape_type="input",
         )
         workflow_shape = self._create_workflow_shape_from_nodes(
-            nodes=end_nodes, workflow_shape=workflow_shape, workflow_shape_type="output"
+            nodes=end_nodes,
+            workflow_shape=workflow_shape,
+            workflow_shape_type="output",
         )
 
         return workflow_shape
@@ -3290,6 +3407,10 @@ class WorkflowManager:
                 msg = f"No publishing handler found for '{publisher_name}' in request type '{type(request).__name__}'."
                 raise ValueError(msg)  # noqa: TRY301
 
+            # Save the workflow before publishing to ensure the latest changes in memory are included
+            workflow = WorkflowRegistry.get_workflow_by_name(request.workflow_name)
+            await GriptapeNodes.ahandle_request(SaveWorkflowRequest(file_name=Path(workflow.file_path).stem))
+
             result = await asyncio.to_thread(publishing_handler.handler, request)
             if isinstance(result, PublishWorkflowResultSuccess):
                 workflow_file = Path(result.published_workflow_file_path)
@@ -3457,9 +3578,13 @@ class WorkflowManager:
 
         # Check workflow version - Schema version 0.6.0+ required for referenced workflow imports
         # (workflow schema was fixed in 0.6.0 to support importing workflows)
-        required_version = Version(major=0, minor=6, patch=0)
-        workflow_version = Version.from_string(workflow.metadata.schema_version)
-        if workflow_version is None or workflow_version < required_version:
+        required_version = semver.VersionInfo(major=0, minor=6, patch=0)
+        try:
+            workflow_version = semver.VersionInfo.parse(workflow.metadata.schema_version)
+        except Exception as e:
+            details = f"Attempted to import workflow '{request.workflow_name}' as referenced sub flow. Failed because workflow version '{workflow.metadata.schema_version}' caused an error: {e}"
+            return ImportWorkflowAsReferencedSubFlowResultFailure(result_details=details)
+        if workflow_version < required_version:
             details = f"Attempted to import workflow '{request.workflow_name}' as referenced sub flow. Failed because workflow version '{workflow.metadata.schema_version}' is less than required version '0.6.0'. To remedy, open the workflow you are attempting to import and save it again to upgrade it to the latest version."
             return ImportWorkflowAsReferencedSubFlowResultFailure(result_details=details)
 
@@ -3574,6 +3699,7 @@ class WorkflowManager:
             schema_version=source_workflow.metadata.schema_version,
             engine_version_created_with=source_workflow.metadata.engine_version_created_with,
             node_libraries_referenced=source_workflow.metadata.node_libraries_referenced.copy(),
+            node_types_used=source_workflow.metadata.node_types_used.copy(),
             workflows_referenced=source_workflow.metadata.workflows_referenced.copy()
             if source_workflow.metadata.workflows_referenced
             else None,
@@ -3657,6 +3783,7 @@ class WorkflowManager:
             schema_version=source_workflow.metadata.schema_version,
             engine_version_created_with=source_workflow.metadata.engine_version_created_with,
             node_libraries_referenced=source_workflow.metadata.node_libraries_referenced.copy(),
+            node_types_used=source_workflow.metadata.node_types_used.copy(),
             workflows_referenced=source_workflow.metadata.workflows_referenced.copy()
             if source_workflow.metadata.workflows_referenced
             else None,
@@ -3744,6 +3871,7 @@ class WorkflowManager:
             schema_version=source_workflow.metadata.schema_version,
             engine_version_created_with=source_workflow.metadata.engine_version_created_with,
             node_libraries_referenced=source_workflow.metadata.node_libraries_referenced.copy(),
+            node_types_used=source_workflow.metadata.node_types_used.copy(),
             workflows_referenced=source_workflow.metadata.workflows_referenced.copy()
             if source_workflow.metadata.workflows_referenced
             else None,
@@ -16,6 +16,7 @@ from pydantic import TypeAdapter
 from rich.logging import RichHandler
 from starlette.types import Receive, Scope, Send
 
+from griptape_nodes.api_client import Client, RequestClient
 from griptape_nodes.retained_mode.events.base_events import RequestPayload
 from griptape_nodes.retained_mode.events.connection_events import (
     CreateConnectionRequest,
@@ -37,7 +38,6 @@ from griptape_nodes.retained_mode.events.parameter_events import (
 )
 from griptape_nodes.retained_mode.managers.config_manager import ConfigManager
 from griptape_nodes.retained_mode.managers.secrets_manager import SecretsManager
-from griptape_nodes.servers.ws_request_manager import AsyncRequestManager, WebSocketConnectionManager
 
 SUPPORTED_REQUEST_EVENTS: dict[str, type[RequestPayload]] = {
     # Nodes
@@ -72,12 +72,12 @@ mcp_server_logger.setLevel(logging.INFO)
 def start_mcp_server(api_key: str) -> None:
     """Synchronous version of main entry point for the Griptape Nodes MCP server."""
     mcp_server_logger.debug("Starting MCP GTN server...")
-    # Give these a session ID
-    connection_manager = WebSocketConnectionManager()
-    request_manager = AsyncRequestManager(connection_manager, api_key)
 
     app = Server("mcp-gtn")
 
+    # Manager reference to be set in lifespan
+    manager: RequestClient | None = None
+
     @app.list_tools()
     async def list_tools() -> list[Tool]:
         return [
@@ -87,16 +87,17 @@ def start_mcp_server(api_key: str) -> None:
 
     @app.call_tool()
     async def call_tool(name: str, arguments: dict) -> list[TextContent]:
+        if manager is None:
+            msg = "Request manager not initialized"
+            raise RuntimeError(msg)
+
         if name not in SUPPORTED_REQUEST_EVENTS:
             msg = f"Unsupported tool: {name}"
             raise ValueError(msg)
 
         request_payload = SUPPORTED_REQUEST_EVENTS[name](**arguments)
 
-        await request_manager.connect()
-        result = await request_manager.create_request_event(
-            request_payload.__class__.__name__, request_payload.__dict__, timeout_ms=5000
-        )
+        result = await manager.request(request_payload.__class__.__name__, request_payload.__dict__, timeout_ms=5000)
         mcp_server_logger.debug("Got result: %s", result)
 
         return [TextContent(type="text", text=json.dumps(result))]
@@ -108,13 +109,20 @@ def start_mcp_server(api_key: str) -> None:
 
     @contextlib.asynccontextmanager
     async def lifespan(_: FastAPI) -> AsyncIterator[None]:
-        """Context manager for managing session manager lifecycle."""
-        async with session_manager.run():
-            mcp_server_logger.debug("GTN MCP server started with StreamableHTTP session manager!")
-            try:
-                yield
-            finally:
-                mcp_server_logger.debug("GTN MCP server shutting down...")
+        """Context manager for managing session manager and WebSocket client lifecycle."""
+        nonlocal manager
+
+        async with Client(api_key=api_key) as ws_client, RequestClient(client=ws_client) as req_manager:
+            manager = req_manager
+            mcp_server_logger.debug("Request manager initialized")
+
+            async with session_manager.run():
+                mcp_server_logger.debug("GTN MCP server started with StreamableHTTP session manager!")
+                try:
+                    yield
+                finally:
+                    mcp_server_logger.debug("GTN MCP server shutting down...")
+                    manager = None
 
     mcp_server_app = FastAPI(lifespan=lifespan)
 
@@ -31,6 +31,42 @@ async def call_function(func: Callable[..., Any], *args: Any, **kwargs: Any) ->
     return func(*args, **kwargs)
 
 
+async def to_thread(func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
+    """Run a synchronous function in a thread pool.
+
+    Differs from `asyncio.to_thread` by waiting for the thread to complete even if the calling coroutine is cancelled.
+
+    CONCURRENCY IS HARD
+    If the coroutine calling `to_thread` is cancelled, the `await` before `asyncio.to_thread` raises CancelledError,
+    But the shielded task itself is not cancelled and continues running in the thread.
+    This allows us to wait for it to complete and get the result.
+
+    References:
+        https://docs.python.org/3/library/asyncio-task.html#shielding-from-cancellation
+        https://trio.readthedocs.io/en/stable/reference-core.html#trio.to_thread.run_sync
+
+    Args:
+        func: The synchronous function to run in a thread
+        *args: Positional arguments to pass to the function
+        **kwargs: Keyword arguments to pass to the function
+
+    Returns:
+        The result of the function call
+
+    Raises:
+        asyncio.CancelledError: After waiting for the thread to complete
+    """
+    task = asyncio.create_task(asyncio.to_thread(func, *args, **kwargs))
+    try:
+        task_result = await asyncio.shield(task)
+    except asyncio.CancelledError:
+        # Wait for the task to finish if it was already running
+        task_result = await task
+        raise
+
+    return task_result
+
+
 async def subprocess_run(
     args: Sequence[str],
     *,
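A short usage sketch for the new helper (a hypothetical caller; the only assumption beyond the hunk is importing to_thread from griptape_nodes.utils.async_utils, the file this hunk modifies):

    import asyncio
    import time

    from griptape_nodes.utils.async_utils import to_thread

    def blocking_work() -> str:
        time.sleep(0.5)  # stand-in for blocking I/O
        return "done"

    async def main() -> None:
        task = asyncio.create_task(to_thread(blocking_work))
        await asyncio.sleep(0.1)
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            # CancelledError propagates only after the worker thread has finished
            print("caller cancelled; thread ran to completion first")

    asyncio.run(main())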
@@ -122,7 +122,7 @@ def _convert_sequence_to_dict(sequence: list | tuple) -> dict:
     return result
 
 
-def merge_dicts(dct: dict | None, merge_dct: dict | None, *, add_keys: bool = True) -> dict:
+def merge_dicts(dct: dict | None, merge_dct: dict | None, *, add_keys: bool = True, merge_lists: bool = False) -> dict:
     """Recursive dict merge.
 
     Inspired by :meth:``dict.update()``, instead of
@@ -137,10 +137,14 @@ def merge_dicts(dct: dict | None, merge_dct: dict | None, *, add_keys: bool = Tr
     present in ``merge_dict`` but not ``dct`` should be included in the
     new dict.
 
+    The optional argument ``merge_lists``, determines whether list values
+    should be merged (combined) instead of replaced.
+
     Args:
         dct: onto which the merge is executed
         merge_dct: dct merged into dct
         add_keys: whether to add new keys
+        merge_lists: whether to merge list values instead of replacing them
 
     Returns:
         dict: updated dict
@@ -155,7 +159,9 @@ def merge_dicts(dct: dict | None, merge_dct: dict | None, *, add_keys: bool = Tr
 
     for key in merge_dct:
         if key in dct and isinstance(dct[key], dict):
-            dct[key] = merge_dicts(dct[key], merge_dct[key], add_keys=add_keys)
+            dct[key] = merge_dicts(dct[key], merge_dct[key], add_keys=add_keys, merge_lists=merge_lists)
+        elif merge_lists and key in dct and isinstance(dct[key], list) and isinstance(merge_dct[key], list):
+            dct[key] = list(set(dct[key] + merge_dct[key]))
         else:
             dct[key] = merge_dct[key]
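A quick illustration of the new flag, based on the implementation above (the module path is taken from the file list; the values are made up):

    from griptape_nodes.utils.dict_utils import merge_dicts

    base = {"opts": {"x": 1}, "tags": ["a", "b"]}
    incoming = {"opts": {"y": 2}, "tags": ["b", "c"]}

    merged = merge_dicts(base, incoming, merge_lists=True)
    # merged["opts"] == {"x": 1, "y": 2}        # nested dicts are still merged recursively
    # merged["tags"] contains "a", "b", "c" exactly once each, in no guaranteed
    # order, because the lists are combined via list(set(...))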