griptape-nodes 0.38.0__py3-none-any.whl → 0.40.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. griptape_nodes/__init__.py +13 -9
  2. griptape_nodes/app/__init__.py +10 -1
  3. griptape_nodes/app/app.py +2 -3
  4. griptape_nodes/app/app_sessions.py +458 -0
  5. griptape_nodes/bootstrap/workflow_executors/__init__.py +1 -0
  6. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +213 -0
  7. griptape_nodes/bootstrap/workflow_executors/workflow_executor.py +13 -0
  8. griptape_nodes/bootstrap/workflow_runners/local_workflow_runner.py +1 -1
  9. griptape_nodes/drivers/storage/__init__.py +4 -0
  10. griptape_nodes/drivers/storage/storage_backend.py +10 -0
  11. griptape_nodes/exe_types/core_types.py +5 -1
  12. griptape_nodes/exe_types/node_types.py +20 -24
  13. griptape_nodes/machines/node_resolution.py +5 -1
  14. griptape_nodes/node_library/advanced_node_library.py +51 -0
  15. griptape_nodes/node_library/library_registry.py +28 -2
  16. griptape_nodes/node_library/workflow_registry.py +1 -1
  17. griptape_nodes/retained_mode/events/agent_events.py +15 -2
  18. griptape_nodes/retained_mode/events/app_events.py +113 -2
  19. griptape_nodes/retained_mode/events/base_events.py +28 -1
  20. griptape_nodes/retained_mode/events/library_events.py +111 -1
  21. griptape_nodes/retained_mode/events/workflow_events.py +1 -0
  22. griptape_nodes/retained_mode/griptape_nodes.py +240 -18
  23. griptape_nodes/retained_mode/managers/agent_manager.py +123 -17
  24. griptape_nodes/retained_mode/managers/flow_manager.py +16 -48
  25. griptape_nodes/retained_mode/managers/library_manager.py +642 -121
  26. griptape_nodes/retained_mode/managers/node_manager.py +1 -1
  27. griptape_nodes/retained_mode/managers/static_files_manager.py +4 -3
  28. griptape_nodes/retained_mode/managers/workflow_manager.py +666 -37
  29. griptape_nodes/retained_mode/utils/__init__.py +1 -0
  30. griptape_nodes/retained_mode/utils/engine_identity.py +131 -0
  31. griptape_nodes/retained_mode/utils/name_generator.py +162 -0
  32. griptape_nodes/retained_mode/utils/session_persistence.py +105 -0
  33. {griptape_nodes-0.38.0.dist-info → griptape_nodes-0.40.0.dist-info}/METADATA +1 -1
  34. {griptape_nodes-0.38.0.dist-info → griptape_nodes-0.40.0.dist-info}/RECORD +37 -27
  35. {griptape_nodes-0.38.0.dist-info → griptape_nodes-0.40.0.dist-info}/WHEEL +0 -0
  36. {griptape_nodes-0.38.0.dist-info → griptape_nodes-0.40.0.dist-info}/entry_points.txt +0 -0
  37. {griptape_nodes-0.38.0.dist-info → griptape_nodes-0.40.0.dist-info}/licenses/LICENSE +0 -0
@@ -31,6 +31,7 @@ from rich.table import Table
31
31
  from rich.text import Text
32
32
  from xdg_base_dirs import xdg_config_home
33
33
 
34
+ from griptape_nodes.drivers.storage import StorageBackend
34
35
  from griptape_nodes.exe_types.core_types import ParameterTypeBuiltin
35
36
  from griptape_nodes.exe_types.node_types import BaseNode, EndNode, StartNode
36
37
  from griptape_nodes.node_library.library_registry import LibraryNameAndVersion, LibraryRegistry
@@ -51,9 +52,6 @@ from griptape_nodes.retained_mode.events.flow_events import (
51
52
  from griptape_nodes.retained_mode.events.library_events import (
52
53
  GetLibraryMetadataRequest,
53
54
  GetLibraryMetadataResultSuccess,
54
- ListRegisteredLibrariesRequest,
55
- ListRegisteredLibrariesResultSuccess,
56
- UnloadLibraryFromRegistryRequest,
57
55
  )
58
56
  from griptape_nodes.retained_mode.events.object_events import ClearAllObjectStateRequest
59
57
  from griptape_nodes.retained_mode.events.secrets_events import (
@@ -98,7 +96,7 @@ from griptape_nodes.retained_mode.griptape_nodes import (
98
96
  )
99
97
 
100
98
  if TYPE_CHECKING:
101
- from collections.abc import Sequence
99
+ from collections.abc import Callable, Sequence
102
100
  from types import TracebackType
103
101
 
104
102
  from griptape_nodes.exe_types.core_types import Parameter
@@ -376,10 +374,7 @@ class WorkflowManager:
376
374
  else:
377
375
  complete_file_path = WorkflowRegistry.get_complete_file_path(relative_file_path=relative_file_path)
378
376
  try:
379
- # TODO: scope the libraries loaded to JUST those used by this workflow, eventually: https://github.com/griptape-ai/griptape-nodes/issues/284
380
- # Load (or reload, which should trigger a hot reload) all libraries
381
- GriptapeNodes.LibraryManager().load_all_libraries_from_config()
382
-
377
+ # Libraries are now loaded only on app initialization and explicit reload requests
383
378
  # Now execute the workflow.
384
379
  with Path(complete_file_path).open(encoding="utf-8") as file:
385
380
  workflow_content = file.read()
@@ -464,22 +459,6 @@ class WorkflowManager:
464
459
  logger.error(details)
465
460
  return RunWorkflowFromRegistryResultFailure()
466
461
 
467
- # Unload all libraries now.
468
- all_libraries_request = ListRegisteredLibrariesRequest()
469
- all_libraries_result = GriptapeNodes.handle_request(all_libraries_request)
470
- if not isinstance(all_libraries_result, ListRegisteredLibrariesResultSuccess):
471
- details = f"When preparing to run a workflow '{request.workflow_name}', failed to get registered libraries."
472
- logger.error(details)
473
- return RunWorkflowFromRegistryResultFailure()
474
-
475
- for library_name in all_libraries_result.libraries:
476
- unload_library_request = UnloadLibraryFromRegistryRequest(library_name=library_name)
477
- unload_library_result = GriptapeNodes.handle_request(unload_library_request)
478
- if not unload_library_result.succeeded():
479
- details = f"When preparing to run a workflow '{request.workflow_name}', failed to unload library '{library_name}'."
480
- logger.error(details)
481
- return RunWorkflowFromRegistryResultFailure()
482
-
483
462
  # Let's run under the assumption that this Workflow will become our Current Context; if we fail, it will revert.
484
463
  GriptapeNodes.ContextManager().push_workflow(request.workflow_name)
485
464
  # run file
@@ -882,6 +861,7 @@ class WorkflowManager:
882
861
  return import_statements
883
862
 
884
863
  def on_save_workflow_request(self, request: SaveWorkflowRequest) -> ResultPayload: # noqa: C901, PLR0911, PLR0912, PLR0915
864
+ logger.debug("SaveWorkflowRequest payload: %r (image_path: %r)", request, getattr(request, "image_path", None))
885
865
  local_tz = datetime.now().astimezone().tzinfo
886
866
 
887
867
  # Start with the file name provided; we may change it.
@@ -980,6 +960,10 @@ class WorkflowManager:
980
960
  logger.error(details)
981
961
  return SaveWorkflowResultFailure()
982
962
 
963
+ # Set the image if provided
964
+ if request.image_path:
965
+ workflow_metadata.image = request.image_path
966
+
983
967
  metadata_block = self._generate_workflow_metadata_header(workflow_metadata=workflow_metadata)
984
968
  if metadata_block is None:
985
969
  details = f"Attempted to save workflow '{relative_file_path}'. Failed to generate metadata block."
@@ -991,6 +975,12 @@ class WorkflowManager:
991
975
 
992
976
  ast_container = ASTContainer()
993
977
 
978
+ prereq_code = self._generate_workflow_run_prerequisite_code(
979
+ workflow_name=workflow_metadata.name, import_recorder=import_recorder
980
+ )
981
+ for node in prereq_code:
982
+ ast_container.add_node(node)
983
+
994
984
  # Generate unique values code AST node.
995
985
  unique_values_node = self._generate_unique_values_code(
996
986
  unique_parameter_uuid_to_values=serialized_flow_commands.unique_parameter_uuid_to_values,
@@ -1047,12 +1037,24 @@ class WorkflowManager:
1047
1037
  )
1048
1038
  ast_container.nodes.extend(set_parameter_value_asts)
1049
1039
 
1040
+ workflow_execution_code = (
1041
+ self._generate_workflow_execution(
1042
+ flow_name=top_level_flow_name,
1043
+ import_recorder=import_recorder,
1044
+ )
1045
+ if top_level_flow_name
1046
+ else None
1047
+ )
1048
+ if workflow_execution_code is not None:
1049
+ for node in workflow_execution_code:
1050
+ ast_container.add_node(node)
1051
+
1050
1052
  # TODO: https://github.com/griptape-ai/griptape-nodes/issues/1190 do child workflows
1051
1053
 
1052
1054
  # Generate final code from ASTContainer
1053
1055
  ast_output = "\n\n".join([ast.unparse(node) for node in ast_container.get_ast()])
1054
1056
  import_output = import_recorder.generate_imports()
1055
- final_code_output = f"{metadata_block}\n\n{import_output}\n\n{ast_output}"
1057
+ final_code_output = f"{metadata_block}\n\n{import_output}\n\n{ast_output}\n"
1056
1058
 
1057
1059
  # Create the pathing and write the file
1058
1060
  file_path.parent.mkdir(parents=True, exist_ok=True)
@@ -1122,6 +1124,466 @@ class WorkflowManager:
1122
1124
 
1123
1125
  return metadata_block
1124
1126
 
1127
+ def _generate_workflow_execution(
1128
+ self,
1129
+ flow_name: str,
1130
+ import_recorder: ImportRecorder,
1131
+ ) -> list[ast.AST] | None:
1132
+ """Generates execute_workflow(...) and the __main__ guard."""
1133
+ try:
1134
+ workflow_shape = self._extract_workflow_shape(flow_name)
1135
+ except ValueError:
1136
+ logger.info("Workflow shape does not have required Start or End Nodes. Skipping local execution block.")
1137
+ return None
1138
+
1139
+ # === imports ===
1140
+ import_recorder.add_import("argparse")
1141
+ import_recorder.add_import("json")
1142
+ import_recorder.add_from_import(
1143
+ "griptape_nodes.bootstrap.workflow_executors.local_workflow_executor", "LocalWorkflowExecutor"
1144
+ )
1145
+
1146
+ # === 1) build the `def execute_workflow(input: dict, storage_backend: str = StorageBackend.LOCAL) -> dict | None:` ===
1147
+ # args
1148
+ arg_input = ast.arg(arg="input", annotation=ast.Name(id="dict", ctx=ast.Load()))
1149
+ arg_storage_backend = ast.arg(arg="storage_backend", annotation=ast.Name(id="str", ctx=ast.Load()))
1150
+ args = ast.arguments(
1151
+ posonlyargs=[],
1152
+ args=[arg_input, arg_storage_backend],
1153
+ vararg=None,
1154
+ kwonlyargs=[],
1155
+ kw_defaults=[],
1156
+ kwarg=None,
1157
+ defaults=[ast.Constant(StorageBackend.LOCAL.value)],
1158
+ )
1159
+ # return annotation: dict | None
1160
+ return_annotation = ast.BinOp(
1161
+ left=ast.Name(id="dict", ctx=ast.Load()),
1162
+ op=ast.BitOr(),
1163
+ right=ast.Constant(value=None),
1164
+ )
1165
+
1166
+ executor_assign = ast.Assign(
1167
+ targets=[ast.Name(id="workflow_executor", ctx=ast.Store())],
1168
+ value=ast.Call(
1169
+ func=ast.Name(id="LocalWorkflowExecutor", ctx=ast.Load()),
1170
+ args=[],
1171
+ keywords=[],
1172
+ ),
1173
+ )
1174
+ run_call = ast.Expr(
1175
+ value=ast.Call(
1176
+ func=ast.Attribute(
1177
+ value=ast.Name(id="workflow_executor", ctx=ast.Load()),
1178
+ attr="run",
1179
+ ctx=ast.Load(),
1180
+ ),
1181
+ args=[],
1182
+ keywords=[
1183
+ ast.keyword(arg="workflow_name", value=ast.Constant(flow_name)),
1184
+ ast.keyword(arg="flow_input", value=ast.Name(id="input", ctx=ast.Load())),
1185
+ ast.keyword(arg="storage_backend", value=ast.Name(id="storage_backend", ctx=ast.Load())),
1186
+ ],
1187
+ )
1188
+ )
1189
+ return_stmt = ast.Return(
1190
+ value=ast.Attribute(
1191
+ value=ast.Name(id="workflow_executor", ctx=ast.Load()),
1192
+ attr="output",
1193
+ ctx=ast.Load(),
1194
+ )
1195
+ )
1196
+
1197
+ func_def = ast.FunctionDef(
1198
+ name="execute_workflow",
1199
+ args=args,
1200
+ body=[executor_assign, run_call, return_stmt],
1201
+ decorator_list=[],
1202
+ returns=return_annotation,
1203
+ type_params=[],
1204
+ )
1205
+ ast.fix_missing_locations(func_def)
1206
+
1207
+ # === 2) build the `if __name__ == "__main__":` block ===
1208
+ main_test = ast.Compare(
1209
+ left=ast.Name(id="__name__", ctx=ast.Load()),
1210
+ ops=[ast.Eq()],
1211
+ comparators=[ast.Constant(value="__main__")],
1212
+ )
1213
+
1214
+ parser_assign = ast.Assign(
1215
+ targets=[ast.Name(id="parser", ctx=ast.Store())],
1216
+ value=ast.Call(
1217
+ func=ast.Attribute(
1218
+ value=ast.Name(id="argparse", ctx=ast.Load()),
1219
+ attr="ArgumentParser",
1220
+ ctx=ast.Load(),
1221
+ ),
1222
+ args=[],
1223
+ keywords=[],
1224
+ ),
1225
+ )
1226
+
1227
+ # Generate parser.add_argument(...) calls for each parameter in workflow_shape
1228
+ add_arg_calls = []
1229
+
1230
+ # Add storage backend argument
1231
+ add_arg_calls.append(
1232
+ ast.Expr(
1233
+ value=ast.Call(
1234
+ func=ast.Attribute(
1235
+ value=ast.Name(id="parser", ctx=ast.Load()),
1236
+ attr="add_argument",
1237
+ ctx=ast.Load(),
1238
+ ),
1239
+ args=[ast.Constant("--storage-backend")],
1240
+ keywords=[
1241
+ ast.keyword(
1242
+ arg="choices",
1243
+ value=ast.List(
1244
+ elts=[ast.Constant(StorageBackend.LOCAL.value), ast.Constant(StorageBackend.GTC.value)],
1245
+ ctx=ast.Load(),
1246
+ ),
1247
+ ),
1248
+ ast.keyword(arg="default", value=ast.Constant(StorageBackend.LOCAL.value)),
1249
+ ast.keyword(
1250
+ arg="help",
1251
+ value=ast.Constant(
1252
+ "Storage backend to use: 'local' for local filesystem or 'gtc' for Griptape Cloud"
1253
+ ),
1254
+ ),
1255
+ ],
1256
+ )
1257
+ )
1258
+ )
1259
+
1260
+ # Generate individual arguments for each parameter in workflow_shape["input"]
1261
+ if "input" in workflow_shape:
1262
+ for node_name, node_params in workflow_shape["input"].items():
1263
+ if isinstance(node_params, dict):
1264
+ for param_name, param_info in node_params.items():
1265
+ # Create CLI argument name: --{param_name}
1266
+ arg_name = f"--{param_name}".lower()
1267
+
1268
+ # Get help text from parameter info
1269
+ help_text = param_info.get("tooltip", f"Parameter {param_name} for node {node_name}")
1270
+
1271
+ add_arg_calls.append(
1272
+ ast.Expr(
1273
+ value=ast.Call(
1274
+ func=ast.Attribute(
1275
+ value=ast.Name(id="parser", ctx=ast.Load()),
1276
+ attr="add_argument",
1277
+ ctx=ast.Load(),
1278
+ ),
1279
+ args=[ast.Constant(arg_name)],
1280
+ keywords=[
1281
+ ast.keyword(arg="default", value=ast.Constant(None)),
1282
+ ast.keyword(arg="help", value=ast.Constant(help_text)),
1283
+ ],
1284
+ )
1285
+ )
1286
+ )
1287
+
1288
+ parse_args = ast.Assign(
1289
+ targets=[ast.Name(id="args", ctx=ast.Store())],
1290
+ value=ast.Call(
1291
+ func=ast.Attribute(
1292
+ value=ast.Name(id="parser", ctx=ast.Load()),
1293
+ attr="parse_args",
1294
+ ctx=ast.Load(),
1295
+ ),
1296
+ args=[],
1297
+ keywords=[],
1298
+ ),
1299
+ )
1300
+
1301
+ # Build flow_input dictionary from individual CLI arguments
1302
+ flow_input_init = ast.Assign(
1303
+ targets=[ast.Name(id="flow_input", ctx=ast.Store())],
1304
+ value=ast.Dict(keys=[], values=[]),
1305
+ )
1306
+
1307
+ # Build the flow_input dict structure from individual arguments
1308
+ build_flow_input_stmts = []
1309
+
1310
+ # For each node, ensure it exists in flow_input
1311
+ build_flow_input_stmts.extend(
1312
+ [
1313
+ ast.If(
1314
+ test=ast.Compare(
1315
+ left=ast.Constant(value=node_name),
1316
+ ops=[ast.NotIn()],
1317
+ comparators=[ast.Name(id="flow_input", ctx=ast.Load())],
1318
+ ),
1319
+ body=[
1320
+ ast.Assign(
1321
+ targets=[
1322
+ ast.Subscript(
1323
+ value=ast.Name(id="flow_input", ctx=ast.Load()),
1324
+ slice=ast.Constant(value=node_name),
1325
+ ctx=ast.Store(),
1326
+ )
1327
+ ],
1328
+ value=ast.Dict(keys=[], values=[]),
1329
+ )
1330
+ ],
1331
+ orelse=[],
1332
+ )
1333
+ for node_name in workflow_shape.get("input", {})
1334
+ ]
1335
+ )
1336
+
1337
+ # For each parameter, get its value from args and add to flow_input
1338
+ build_flow_input_stmts.extend(
1339
+ [
1340
+ ast.If(
1341
+ test=ast.Compare(
1342
+ left=ast.Attribute(
1343
+ value=ast.Name(id="args", ctx=ast.Load()),
1344
+ attr=param_name.lower(),
1345
+ ctx=ast.Load(),
1346
+ ),
1347
+ ops=[ast.IsNot()],
1348
+ comparators=[ast.Constant(value=None)],
1349
+ ),
1350
+ body=[
1351
+ ast.Assign(
1352
+ targets=[
1353
+ ast.Subscript(
1354
+ value=ast.Subscript(
1355
+ value=ast.Name(id="flow_input", ctx=ast.Load()),
1356
+ slice=ast.Constant(value=node_name),
1357
+ ctx=ast.Load(),
1358
+ ),
1359
+ slice=ast.Constant(value=param_name),
1360
+ ctx=ast.Store(),
1361
+ )
1362
+ ],
1363
+ value=ast.Attribute(
1364
+ value=ast.Name(id="args", ctx=ast.Load()),
1365
+ attr=param_name.lower(),
1366
+ ctx=ast.Load(),
1367
+ ),
1368
+ )
1369
+ ],
1370
+ orelse=[],
1371
+ )
1372
+ for node_name, node_params in workflow_shape.get("input", {}).items()
1373
+ if isinstance(node_params, dict)
1374
+ for param_name in node_params
1375
+ ]
1376
+ )
1377
+
1378
+ workflow_output = ast.Assign(
1379
+ targets=[ast.Name(id="workflow_output", ctx=ast.Store())],
1380
+ value=ast.Call(
1381
+ func=ast.Name(id="execute_workflow", ctx=ast.Load()),
1382
+ args=[],
1383
+ keywords=[
1384
+ ast.keyword(arg="input", value=ast.Name(id="flow_input", ctx=ast.Load())),
1385
+ ast.keyword(
1386
+ arg="storage_backend",
1387
+ value=ast.Attribute(
1388
+ value=ast.Name(id="args", ctx=ast.Load()),
1389
+ attr="storage_backend",
1390
+ ctx=ast.Load(),
1391
+ ),
1392
+ ),
1393
+ ],
1394
+ ),
1395
+ )
1396
+ print_output = ast.Expr(
1397
+ value=ast.Call(
1398
+ func=ast.Name(id="print", ctx=ast.Load()),
1399
+ args=[ast.Name(id="workflow_output", ctx=ast.Load())],
1400
+ keywords=[],
1401
+ )
1402
+ )
1403
+
1404
+ if_node = ast.If(
1405
+ test=main_test,
1406
+ body=[
1407
+ parser_assign,
1408
+ *add_arg_calls,
1409
+ parse_args,
1410
+ flow_input_init,
1411
+ *build_flow_input_stmts,
1412
+ workflow_output,
1413
+ print_output,
1414
+ ],
1415
+ orelse=[],
1416
+ )
1417
+ ast.fix_missing_locations(if_node)
1418
+
1419
+ return [func_def, if_node]
1420
+
1421
+ def _generate_workflow_run_prerequisite_code(
1422
+ self,
1423
+ workflow_name: str,
1424
+ import_recorder: ImportRecorder,
1425
+ ) -> list[ast.AST]:
1426
+ import_recorder.add_from_import(
1427
+ "griptape_nodes.retained_mode.events.library_events", "GetAllInfoForAllLibrariesRequest"
1428
+ )
1429
+ import_recorder.add_from_import(
1430
+ "griptape_nodes.retained_mode.events.library_events", "GetAllInfoForAllLibrariesResultSuccess"
1431
+ )
1432
+
1433
+ code_blocks: list[ast.AST] = []
1434
+
1435
+ response_assign = ast.Assign(
1436
+ targets=[ast.Name(id="response", ctx=ast.Store())],
1437
+ value=ast.Call(
1438
+ func=ast.Attribute(
1439
+ value=ast.Call(
1440
+ func=ast.Attribute(
1441
+ value=ast.Name(id="GriptapeNodes", ctx=ast.Load()),
1442
+ attr="LibraryManager",
1443
+ ctx=ast.Load(),
1444
+ ),
1445
+ args=[],
1446
+ keywords=[],
1447
+ ),
1448
+ attr="get_all_info_for_all_libraries_request",
1449
+ ctx=ast.Load(),
1450
+ ),
1451
+ args=[
1452
+ ast.Call(
1453
+ func=ast.Name(id="GetAllInfoForAllLibrariesRequest", ctx=ast.Load()),
1454
+ args=[],
1455
+ keywords=[],
1456
+ )
1457
+ ],
1458
+ keywords=[],
1459
+ ),
1460
+ )
1461
+ ast.fix_missing_locations(response_assign)
1462
+ code_blocks.append(response_assign)
1463
+
1464
+ isinstance_test = ast.Call(
1465
+ func=ast.Name(id="isinstance", ctx=ast.Load()),
1466
+ args=[
1467
+ ast.Name(id="response", ctx=ast.Load()),
1468
+ ast.Name(id="GetAllInfoForAllLibrariesResultSuccess", ctx=ast.Load()),
1469
+ ],
1470
+ keywords=[],
1471
+ )
1472
+ ast.fix_missing_locations(isinstance_test)
1473
+
1474
+ len_call = ast.Call(
1475
+ func=ast.Name(id="len", ctx=ast.Load()),
1476
+ args=[
1477
+ ast.Call(
1478
+ func=ast.Attribute(
1479
+ value=ast.Attribute(
1480
+ value=ast.Name(id="response", ctx=ast.Load()),
1481
+ attr="library_name_to_library_info",
1482
+ ctx=ast.Load(),
1483
+ ),
1484
+ attr="keys",
1485
+ ctx=ast.Load(),
1486
+ ),
1487
+ args=[],
1488
+ keywords=[],
1489
+ )
1490
+ ],
1491
+ keywords=[],
1492
+ )
1493
+ compare_len = ast.Compare(
1494
+ left=len_call,
1495
+ ops=[ast.Lt()],
1496
+ comparators=[ast.Constant(value=1)],
1497
+ )
1498
+ ast.fix_missing_locations(compare_len)
1499
+
1500
+ test = ast.BoolOp(
1501
+ op=ast.And(),
1502
+ values=[isinstance_test, compare_len],
1503
+ )
1504
+ ast.fix_missing_locations(test)
1505
+
1506
+ # 3) the body: GriptapeNodes.LibraryManager().load_all_libraries_from_config()
1507
+ # TODO (https://github.com/griptape-ai/griptape-nodes/issues/1615): Generate requests to load ONLY the libraries used in this workflow
1508
+ load_call = ast.Expr(
1509
+ value=ast.Call(
1510
+ func=ast.Attribute(
1511
+ value=ast.Call(
1512
+ func=ast.Attribute(
1513
+ value=ast.Name(id="GriptapeNodes", ctx=ast.Load()),
1514
+ attr="LibraryManager",
1515
+ ctx=ast.Load(),
1516
+ ),
1517
+ args=[],
1518
+ keywords=[],
1519
+ ),
1520
+ attr="load_all_libraries_from_config",
1521
+ ctx=ast.Load(),
1522
+ ),
1523
+ args=[],
1524
+ keywords=[],
1525
+ )
1526
+ )
1527
+ ast.fix_missing_locations(load_call)
1528
+
1529
+ # 4) assemble the `if` statement
1530
+ if_node = ast.If(
1531
+ test=test,
1532
+ body=[load_call],
1533
+ orelse=[],
1534
+ )
1535
+ ast.fix_missing_locations(if_node)
1536
+ code_blocks.append(if_node)
1537
+
1538
+ # 5) context_manager = GriptapeNodes.ContextManager()
1539
+ assign_context_manager = ast.Assign(
1540
+ targets=[ast.Name(id="context_manager", ctx=ast.Store())],
1541
+ value=ast.Call(
1542
+ func=ast.Attribute(
1543
+ value=ast.Name(id="GriptapeNodes", ctx=ast.Load()),
1544
+ attr="ContextManager",
1545
+ ctx=ast.Load(),
1546
+ ),
1547
+ args=[],
1548
+ keywords=[],
1549
+ ),
1550
+ )
1551
+ ast.fix_missing_locations(assign_context_manager)
1552
+ code_blocks.append(assign_context_manager)
1553
+
1554
+ has_check = ast.Call(
1555
+ func=ast.Attribute(
1556
+ value=ast.Name(id="context_manager", ctx=ast.Load()),
1557
+ attr="has_current_workflow",
1558
+ ctx=ast.Load(),
1559
+ ),
1560
+ args=[],
1561
+ keywords=[],
1562
+ )
1563
+ test = ast.UnaryOp(op=ast.Not(), operand=has_check)
1564
+
1565
+ push_call = ast.Expr(
1566
+ value=ast.Call(
1567
+ func=ast.Attribute(
1568
+ value=ast.Name(id="context_manager", ctx=ast.Load()),
1569
+ attr="push_workflow",
1570
+ ctx=ast.Load(),
1571
+ ),
1572
+ args=[],
1573
+ keywords=[ast.keyword(arg="workflow_name", value=ast.Constant(value=workflow_name))],
1574
+ )
1575
+ )
1576
+ ast.fix_missing_locations(push_call)
1577
+
1578
+ if_stmt = ast.If(
1579
+ test=test,
1580
+ body=[push_call],
1581
+ orelse=[],
1582
+ )
1583
+ ast.fix_missing_locations(if_stmt)
1584
+ code_blocks.append(if_stmt)
1585
+ return code_blocks
1586
+
1125
1587
  def _generate_unique_values_code(
1126
1588
  self,
1127
1589
  unique_parameter_uuid_to_values: dict[SerializedNodeCommands.UniqueParameterValueUUID, Any],
@@ -1137,19 +1599,35 @@ class WorkflowManager:
1137
1599
  global_modules_set = {"builtins", "__main__"}
1138
1600
 
1139
1601
  # Serialize the unique values as pickled strings.
1602
+ # IMPORTANT: We patch dynamic module names to stable namespaces before pickling
1603
+ # to ensure generated workflows can reliably import the required classes.
1140
1604
  unique_parameter_dict = {}
1605
+
1141
1606
  for uuid, unique_parameter_value in unique_parameter_uuid_to_values.items():
1142
- unique_parameter_bytes = pickle.dumps(unique_parameter_value)
1607
+ # Dynamic Module Patching Strategy:
1608
+ # When we pickle objects from dynamically loaded modules (like VideoUrlArtifact),
1609
+ # pickle stores the class's __module__ attribute in the binary data. If we don't
1610
+ # patch this, the pickle data would contain something like:
1611
+ # "gtn_dynamic_module_image_to_video_py_123456789.VideoUrlArtifact"
1612
+ #
1613
+ # When the workflow runs later, Python tries to import this module name, which
1614
+ # fails because dynamic modules don't exist in fresh Python processes.
1615
+ #
1616
+ # Our solution: Temporarily patch the class's __module__ to use the stable namespace
1617
+ # before pickling, so the pickle data contains:
1618
+ # "griptape_nodes.node_libraries.runwayml_library.image_to_video.VideoUrlArtifact"
1619
+ #
1620
+ # This includes recursive patching for nested objects in containers (lists, tuples, dicts)
1621
+
1622
+ # Apply recursive dynamic module patching, pickle, then restore
1623
+ unique_parameter_bytes = self._patch_and_pickle_object(unique_parameter_value)
1624
+
1143
1625
  # Encode the bytes as a string using latin1
1144
1626
  unique_parameter_byte_str = unique_parameter_bytes.decode("latin1")
1145
1627
  unique_parameter_dict[uuid] = unique_parameter_byte_str
1146
1628
 
1147
- # Add import for the unique parameter value's class/module. But not globals.
1148
- value_type = type(unique_parameter_value)
1149
- if isclass(value_type):
1150
- module = getmodule(value_type)
1151
- if module and module.__name__ not in global_modules_set:
1152
- import_recorder.add_from_import(module.__name__, value_type.__name__)
1629
+ # Collect import statements for all classes in the object tree
1630
+ self._collect_object_imports(unique_parameter_value, import_recorder, global_modules_set)
1153
1631
 
1154
1632
  # Generate a comment explaining what we're doing:
1155
1633
  comment_text = (
@@ -1348,6 +1826,7 @@ class WorkflowManager:
1348
1826
  ) -> list[ast.stmt]:
1349
1827
  # Ensure necessary imports are recorded
1350
1828
  import_recorder.add_from_import("griptape_nodes.node_library.library_registry", "NodeMetadata")
1829
+ import_recorder.add_from_import("griptape_nodes.node_library.library_registry", "IconVariant")
1351
1830
  import_recorder.add_from_import("griptape_nodes.retained_mode.events.node_events", "CreateNodeRequest")
1352
1831
  import_recorder.add_from_import(
1353
1832
  "griptape_nodes.retained_mode.events.parameter_events", "AddParameterToNodeRequest"
@@ -1720,8 +2199,8 @@ class WorkflowManager:
1720
2199
  }
1721
2200
  return workflow_shape
1722
2201
 
1723
- def _validate_workflow_shape_for_publish(self, workflow_name: str) -> dict[str, Any]:
1724
- """Validates the workflow shape for publishing.
2202
+ def _extract_workflow_shape(self, workflow_name: str) -> dict[str, Any]:
2203
+ """Extracts the input and output shape for a workflow.
1725
2204
 
1726
2205
  Here we gather information about the Workflow's exposed input and output Parameters
1727
2206
  such that a client invoking the Workflow can understand what values to provide
@@ -1755,11 +2234,9 @@ class WorkflowManager:
1755
2234
  end_nodes.append(node)
1756
2235
  if len(start_nodes) < 1:
1757
2236
  details = f"Workflow '{workflow_name}' does not have a StartNode."
1758
- logger.error(details)
1759
2237
  raise ValueError(details)
1760
2238
  if len(end_nodes) < 1:
1761
2239
  details = f"Workflow '{workflow_name}' does not have an EndNode."
1762
- logger.error(details)
1763
2240
  raise ValueError(details)
1764
2241
 
1765
2242
  # Now, we need to gather the input and output parameters for each node type.
@@ -2101,7 +2578,7 @@ class WorkflowManager:
2101
2578
  def on_publish_workflow_request(self, request: PublishWorkflowRequest) -> ResultPayload:
2102
2579
  try:
2103
2580
  # Get the workflow shape
2104
- workflow_shape = self._validate_workflow_shape_for_publish(request.workflow_name)
2581
+ workflow_shape = self._extract_workflow_shape(request.workflow_name)
2105
2582
  logger.info("Workflow shape: %s", workflow_shape)
2106
2583
 
2107
2584
  # Package the workflow
@@ -2132,6 +2609,158 @@ class WorkflowManager:
2132
2609
  logger.error(details)
2133
2610
  return PublishWorkflowResultFailure()
2134
2611
 
2612
+ def _walk_object_tree(
2613
+ self, obj: Any, process_class_fn: Callable[[type, Any], None], visited: set[int] | None = None
2614
+ ) -> None:
2615
+ """Recursively walk through object tree, calling process_class_fn for each class found.
2616
+
2617
+ This unified helper handles the common pattern of recursively traversing nested objects
2618
+ to find all class instances. Used by both patching and import collection.
2619
+
2620
+ Args:
2621
+ obj: Object to traverse (can contain nested lists, dicts, class instances)
2622
+ process_class_fn: Function to call for each class found, signature: (class_type, instance)
2623
+ visited: Set of object IDs already visited (for circular reference protection)
2624
+
2625
+ Example:
2626
+ # Collect all class types in a nested structure
2627
+ def collect_type(cls, instance):
2628
+ print(f"Found {cls.__name__} instance")
2629
+
2630
+ data = [SomeClass(), {"key": AnotherClass()}]
2631
+ self._walk_object_tree(data, collect_type)
2632
+ """
2633
+ if visited is None:
2634
+ visited = set()
2635
+
2636
+ obj_id = id(obj)
2637
+ if obj_id in visited:
2638
+ return
2639
+ visited.add(obj_id)
2640
+
2641
+ # Process the object if it's a class instance
2642
+ obj_type = type(obj)
2643
+ if isclass(obj_type):
2644
+ process_class_fn(obj_type, obj)
2645
+
2646
+ # Recursively traverse containers
2647
+ if isinstance(obj, (list, tuple)):
2648
+ for item in obj:
2649
+ self._walk_object_tree(item, process_class_fn, visited)
2650
+ elif isinstance(obj, dict):
2651
+ for key, value in obj.items():
2652
+ self._walk_object_tree(key, process_class_fn, visited)
2653
+ self._walk_object_tree(value, process_class_fn, visited)
2654
+ elif hasattr(obj, "__dict__"):
2655
+ for attr_value in obj.__dict__.values():
2656
+ self._walk_object_tree(attr_value, process_class_fn, visited)
2657
+
2658
+ def _patch_and_pickle_object(self, obj: Any) -> bytes:
2659
+ """Patch dynamic module references to stable namespaces, pickle object, then restore.
2660
+
2661
+ This solves the "pickle data was truncated" error that occurs when workflows containing
2662
+ objects from dynamically loaded modules (like VideoUrlArtifact, ReferenceImageArtifact)
2663
+ are serialized and later reloaded in a fresh Python process.
2664
+
2665
+ The Problem:
2666
+ Dynamic modules get names like "gtn_dynamic_module_image_to_video_py_123456789"
2667
+ When pickle serializes objects, it embeds these module names in the binary data
2668
+ When workflows run later, Python can't import these non-existent module names
2669
+
2670
+ The Solution:
2671
+ 1. Recursively find all objects from dynamic modules (even nested in containers)
2672
+ 2. Temporarily patch their __module__ and module_name to stable namespaces
2673
+ 3. Pickle with stable references like "griptape_nodes.node_libraries.runwayml_library.image_to_video"
2674
+ 4. Restore original names to avoid side effects
2675
+
2676
+ Args:
2677
+ obj: Object to patch and pickle (may contain nested structures)
2678
+
2679
+ Returns:
2680
+ Pickled bytes with stable module references
2681
+
2682
+ Example:
2683
+ Before: pickle contains "gtn_dynamic_module_image_to_video_py_123456789.VideoUrlArtifact"
2684
+ After: pickle contains "griptape_nodes.node_libraries.runwayml_library.image_to_video.VideoUrlArtifact"
2685
+ """
2686
+ patched_classes: list[tuple[type, str]] = []
2687
+ patched_instances: list[tuple[Any, str]] = []
2688
+
2689
+ def patch_class(class_type: type, instance: Any) -> None:
2690
+ """Patch a single class instance to use stable namespace."""
2691
+ module = getmodule(class_type)
2692
+ if module and GriptapeNodes.LibraryManager().is_dynamic_module(module.__name__):
2693
+ stable_namespace = GriptapeNodes.LibraryManager().get_stable_namespace_for_dynamic_module(
2694
+ module.__name__
2695
+ )
2696
+ if stable_namespace:
2697
+ # Patch class __module__ (affects pickle class reference)
2698
+ if class_type.__module__ != stable_namespace:
2699
+ patched_classes.append((class_type, class_type.__module__))
2700
+ class_type.__module__ = stable_namespace
2701
+
2702
+ # Patch instance module_name field (affects SerializableMixin serialization)
2703
+ if hasattr(instance, "module_name") and instance.module_name != stable_namespace:
2704
+ patched_instances.append((instance, instance.module_name))
2705
+ instance.module_name = stable_namespace
2706
+
2707
+ try:
2708
+ # Apply patches to entire object tree
2709
+ self._walk_object_tree(obj, patch_class)
2710
+ return pickle.dumps(obj)
2711
+ finally:
2712
+ # Always restore original names to avoid affecting other code
2713
+ for class_obj, original_name in patched_classes:
2714
+ class_obj.__module__ = original_name
2715
+ for instance_obj, original_name in patched_instances:
2716
+ instance_obj.module_name = original_name
2717
+
2718
+ def _collect_object_imports(self, obj: Any, import_recorder: Any, global_modules_set: set[str]) -> None:
2719
+ """Recursively collect import statements needed for all classes in object tree.
2720
+
2721
+ This ensures that generated workflows have all necessary import statements,
2722
+ including for classes nested deep within containers like ParameterArrays.
2723
+
2724
+ The Process:
2725
+ 1. Walk through entire object tree (lists, dicts, object attributes)
2726
+ 2. For each class found, determine the correct import statement
2727
+ 3. For dynamic modules, use stable namespace imports
2728
+ 4. For regular modules, use standard imports
2729
+ 5. Record all imports for workflow generation
2730
+
2731
+ Args:
2732
+ obj: Object tree to analyze for required imports
2733
+ import_recorder: Collector that will generate the import statements
2734
+ global_modules_set: Built-in modules that don't need explicit imports
2735
+
2736
+ Example:
2737
+ Input object tree: [ReferenceImageArtifact(), {"data": ImageUrlArtifact()}]
2738
+ Generated imports:
2739
+ from griptape_nodes.node_libraries.runwayml_library.create_reference_image import ReferenceImageArtifact
2740
+ from griptape.artifacts.image_url_artifact import ImageUrlArtifact
2741
+ """
2742
+
2743
+ def collect_class_import(class_type: type, _instance: Any) -> None:
2744
+ """Collect import statement for a single class."""
2745
+ module = getmodule(class_type)
2746
+ if module and module.__name__ not in global_modules_set:
2747
+ if GriptapeNodes.LibraryManager().is_dynamic_module(module.__name__):
2748
+ # Use stable namespace for dynamic modules
2749
+ stable_namespace = GriptapeNodes.LibraryManager().get_stable_namespace_for_dynamic_module(
2750
+ module.__name__
2751
+ )
2752
+ if stable_namespace:
2753
+ import_recorder.add_from_import(stable_namespace, class_type.__name__)
2754
+ else:
2755
+ msg = f"Missing stable namespace for {module.__name__} type {class_type.__name__}"
2756
+ logger.error(msg)
2757
+ raise RuntimeError(msg)
2758
+ else:
2759
+ # Use regular module name for standard modules
2760
+ import_recorder.add_from_import(module.__name__, class_type.__name__)
2761
+
2762
+ self._walk_object_tree(obj, collect_class_import)
2763
+
2135
2764
 
2136
2765
  class ASTContainer:
2137
2766
  """ASTContainer is a helper class to keep track of AST nodes and generate final code from them."""