griptape-nodes 0.57.1__py3-none-any.whl → 0.58.0__py3-none-any.whl

This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (51)
  1. griptape_nodes/api_client/__init__.py +9 -0
  2. griptape_nodes/api_client/client.py +279 -0
  3. griptape_nodes/api_client/request_client.py +273 -0
  4. griptape_nodes/app/app.py +57 -150
  5. griptape_nodes/bootstrap/utils/python_subprocess_executor.py +1 -1
  6. griptape_nodes/bootstrap/workflow_executors/local_session_workflow_executor.py +22 -50
  7. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +6 -1
  8. griptape_nodes/bootstrap/workflow_executors/subprocess_workflow_executor.py +27 -46
  9. griptape_nodes/bootstrap/workflow_executors/utils/subprocess_script.py +7 -0
  10. griptape_nodes/bootstrap/workflow_publishers/local_workflow_publisher.py +3 -1
  11. griptape_nodes/bootstrap/workflow_publishers/subprocess_workflow_publisher.py +3 -1
  12. griptape_nodes/bootstrap/workflow_publishers/utils/subprocess_script.py +16 -1
  13. griptape_nodes/common/node_executor.py +466 -0
  14. griptape_nodes/drivers/storage/base_storage_driver.py +0 -11
  15. griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py +7 -25
  16. griptape_nodes/drivers/storage/local_storage_driver.py +2 -2
  17. griptape_nodes/exe_types/connections.py +37 -9
  18. griptape_nodes/exe_types/core_types.py +1 -1
  19. griptape_nodes/exe_types/node_types.py +115 -22
  20. griptape_nodes/machines/control_flow.py +48 -7
  21. griptape_nodes/machines/parallel_resolution.py +98 -29
  22. griptape_nodes/machines/sequential_resolution.py +61 -22
  23. griptape_nodes/node_library/library_registry.py +24 -1
  24. griptape_nodes/node_library/workflow_registry.py +38 -2
  25. griptape_nodes/retained_mode/events/execution_events.py +8 -1
  26. griptape_nodes/retained_mode/events/flow_events.py +90 -3
  27. griptape_nodes/retained_mode/events/node_events.py +17 -10
  28. griptape_nodes/retained_mode/events/workflow_events.py +5 -0
  29. griptape_nodes/retained_mode/griptape_nodes.py +16 -219
  30. griptape_nodes/retained_mode/managers/config_manager.py +0 -46
  31. griptape_nodes/retained_mode/managers/engine_identity_manager.py +225 -74
  32. griptape_nodes/retained_mode/managers/flow_manager.py +1276 -230
  33. griptape_nodes/retained_mode/managers/library_manager.py +7 -8
  34. griptape_nodes/retained_mode/managers/node_manager.py +197 -9
  35. griptape_nodes/retained_mode/managers/secrets_manager.py +26 -0
  36. griptape_nodes/retained_mode/managers/session_manager.py +264 -227
  37. griptape_nodes/retained_mode/managers/settings.py +4 -38
  38. griptape_nodes/retained_mode/managers/static_files_manager.py +3 -3
  39. griptape_nodes/retained_mode/managers/version_compatibility_manager.py +135 -6
  40. griptape_nodes/retained_mode/managers/workflow_manager.py +206 -78
  41. griptape_nodes/servers/mcp.py +23 -15
  42. griptape_nodes/utils/async_utils.py +36 -0
  43. griptape_nodes/utils/dict_utils.py +8 -2
  44. griptape_nodes/version_compatibility/versions/v0_39_0/modified_parameters_set_removal.py +11 -6
  45. griptape_nodes/version_compatibility/workflow_versions/v0_7_0/local_executor_argument_addition.py +12 -5
  46. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/METADATA +4 -3
  47. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/RECORD +49 -47
  48. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/WHEEL +1 -1
  49. griptape_nodes/retained_mode/utils/engine_identity.py +0 -245
  50. griptape_nodes/servers/ws_request_manager.py +0 -268
  51. {griptape_nodes-0.57.1.dist-info → griptape_nodes-0.58.0.dist-info}/entry_points.txt +0 -0
griptape_nodes/bootstrap/workflow_executors/utils/subprocess_script.py
@@ -34,6 +34,12 @@ def _main() -> None:
         default=None,
         help="Path to the Griptape Nodes workflow file",
     )
+    parser.add_argument(
+        "--pickle-control-flow-result",
+        action="store_true",
+        default=False,
+        help="Whether to pickle control flow results",
+    )
     args = parser.parse_args()
     flow_input = json.loads(args.json_input)
 
@@ -44,6 +50,7 @@ def _main() -> None:
     execute_workflow(
         input=flow_input,
         workflow_executor=local_session_workflow_executor,
+        pickle_control_flow_result=args.pickle_control_flow_result,
    )
 
 
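The new --pickle-control-flow-result option is a plain boolean switch: absent, it parses to False; present, it parses to True and is forwarded into execute_workflow(...). A minimal, self-contained sketch of that parsing behavior (the parser setup here is simplified; --json-input stands in for the script's other arguments, and only the flag itself mirrors the diff):

    import argparse

    # Simplified stand-in for the executor subprocess script's parser.
    parser = argparse.ArgumentParser()
    parser.add_argument("--json-input", default="{}")
    parser.add_argument(
        "--pickle-control-flow-result",
        action="store_true",
        default=False,
        help="Whether to pickle control flow results",
    )

    # Omitting the flag keeps the default of False.
    assert parser.parse_args(["--json-input", "{}"]).pickle_control_flow_result is False
    # Passing the bare flag flips it to True.
    assert parser.parse_args(["--pickle-control-flow-result"]).pickle_control_flow_result is True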
griptape_nodes/bootstrap/workflow_publishers/local_workflow_publisher.py
@@ -24,15 +24,17 @@ class LocalWorkflowPublisher(LocalWorkflowExecutor):
         workflow_path: str,
         publisher_name: str,
         published_workflow_file_name: str,
-        **kwargs: Any,  # noqa: ARG002
+        **kwargs: Any,
     ) -> None:
         # Load the workflow into memory
         await self.aprepare_workflow_for_run(workflow_name=workflow_name, flow_input={}, workflow_path=workflow_path)
+        pickle_control_flow_result = kwargs.get("pickle_control_flow_result", False)
         publish_workflow_request = PublishWorkflowRequest(
             workflow_name=workflow_name,
             publisher_name=publisher_name,
             execute_on_publish=False,
             published_workflow_file_name=published_workflow_file_name,
+            pickle_control_flow_result=pickle_control_flow_result,
         )
         publish_workflow_result = await GriptapeNodes.ahandle_request(publish_workflow_request)
 
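With **kwargs no longer ignored (the # noqa: ARG002 suppression is gone), callers opt in by passing the keyword through arun(...), where it is read via kwargs.get("pickle_control_flow_result", False). A hedged sketch of the calling pattern, mirroring how NodeExecutor._publish_library_workflow invokes the subprocess publisher later in this diff (the argument values here are illustrative; only the keyword itself is the point):

    import asyncio

    from griptape_nodes.bootstrap.workflow_publishers.subprocess_workflow_publisher import SubprocessWorkflowPublisher


    async def publish_with_pickling() -> None:
        # Illustrative values; omitting pickle_control_flow_result keeps the previous behavior (False).
        publisher = SubprocessWorkflowPublisher()
        await publisher.arun(
            workflow_name="my_workflow",
            workflow_path="/tmp/my_workflow.py",
            publisher_name="Griptape Nodes Library",
            published_workflow_file_name="my_workflow_published",
            pickle_control_flow_result=True,
        )


    asyncio.run(publish_with_pickling())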
griptape_nodes/bootstrap/workflow_publishers/subprocess_workflow_publisher.py
@@ -41,7 +41,7 @@ class SubprocessWorkflowPublisher(LocalWorkflowPublisher, PythonSubprocessExecut
         workflow_path: str,
         publisher_name: str,
         published_workflow_file_name: str,
-        **kwargs: Any,  # noqa: ARG002
+        **kwargs: Any,
     ) -> None:
         """Publish a workflow in a subprocess and wait for completion."""
         script_path = Path(__file__).parent / "utils" / "subprocess_script.py"
@@ -77,6 +77,8 @@ class SubprocessWorkflowPublisher(LocalWorkflowPublisher, PythonSubprocessExecut
             "--published-workflow-file-name",
             published_workflow_file_name,
         ]
+        if kwargs.get("pickle_control_flow_result"):
+            args.append("--pickle-control-flow-result")
         await self.execute_python_script(
             script_path=tmp_script_path,
             args=args,
griptape_nodes/bootstrap/workflow_publishers/utils/subprocess_script.py
@@ -9,7 +9,14 @@ logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
 
-async def _main(workflow_name: str, workflow_path: str, publisher_name: str, published_workflow_file_name: str) -> None:
+async def _main(
+    workflow_name: str,
+    workflow_path: str,
+    publisher_name: str,
+    published_workflow_file_name: str,
+    *,
+    pickle_control_flow_result: bool,
+) -> None:
     local_publisher = LocalWorkflowPublisher()
     async with local_publisher as publisher:
         await publisher.arun(
@@ -17,6 +24,7 @@ async def _main(workflow_name: str, workflow_path: str, publisher_name: str, pub
             workflow_path=workflow_path,
             publisher_name=publisher_name,
             published_workflow_file_name=published_workflow_file_name,
+            pickle_control_flow_result=pickle_control_flow_result,
         )
 
         msg = f"Published workflow to file: {published_workflow_file_name}"
@@ -43,6 +51,12 @@ if __name__ == "__main__":
     parser.add_argument(
         "--published-workflow-file-name", help="Name to use for the published workflow file", required=True
     )
+    parser.add_argument(
+        "--pickle-control-flow-result",
+        action="store_true",
+        default=False,
+        help="Whether to pickle control flow results",
+    )
     args = parser.parse_args()
     asyncio.run(
        _main(
@@ -50,5 +64,6 @@ if __name__ == "__main__":
             workflow_path=args.workflow_path,
             publisher_name=args.publisher_name,
             published_workflow_file_name=args.published_workflow_file_name,
+            pickle_control_flow_result=args.pickle_control_flow_result,
         )
     )
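Note that pickle_control_flow_result is declared after the bare *, so it can only be passed by keyword; a positional call keeps failing loudly rather than silently binding the flag to the wrong parameter. A small illustration of that constraint (the function here is a stand-in, not the script's _main):

    def _run(workflow_name: str, *, pickle_control_flow_result: bool) -> str:
        # Stand-in for _main(): the bare * forces keyword-only use of the flag.
        return f"{workflow_name}: pickle={pickle_control_flow_result}"


    print(_run("my_workflow", pickle_control_flow_result=True))  # OK
    # _run("my_workflow", True) would raise TypeError: too many positional arguments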
griptape_nodes/common/node_executor.py (new file)
@@ -0,0 +1,466 @@
+from __future__ import annotations
+
+import ast
+import logging
+import pickle
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, NamedTuple
+
+from griptape_nodes.bootstrap.workflow_publishers.subprocess_workflow_publisher import SubprocessWorkflowPublisher
+from griptape_nodes.drivers.storage.storage_backend import StorageBackend
+from griptape_nodes.exe_types.core_types import ParameterTypeBuiltin
+from griptape_nodes.exe_types.node_types import (
+    CONTROL_INPUT_PARAMETER,
+    LOCAL_EXECUTION,
+    PRIVATE_EXECUTION,
+    EndNode,
+    StartNode,
+)
+from griptape_nodes.node_library.library_registry import Library, LibraryRegistry
+from griptape_nodes.node_library.workflow_registry import WorkflowRegistry
+from griptape_nodes.retained_mode.events.flow_events import (
+    PackageNodeAsSerializedFlowRequest,
+    PackageNodeAsSerializedFlowResultSuccess,
+)
+from griptape_nodes.retained_mode.events.workflow_events import (
+    DeleteWorkflowRequest,
+    DeleteWorkflowResultFailure,
+    LoadWorkflowMetadata,
+    LoadWorkflowMetadataResultSuccess,
+    PublishWorkflowRequest,
+    SaveWorkflowFileFromSerializedFlowRequest,
+    SaveWorkflowFileFromSerializedFlowResultSuccess,
+)
+from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
+
+if TYPE_CHECKING:
+    from griptape_nodes.exe_types.node_types import BaseNode
+    from griptape_nodes.retained_mode.events.node_events import SerializedNodeCommands
+    from griptape_nodes.retained_mode.managers.library_manager import LibraryManager
+
+logger = logging.getLogger("griptape_nodes")
+
+
+class PublishLocalWorkflowResult(NamedTuple):
+    """Result from publishing a local workflow."""
+
+    workflow_result: SaveWorkflowFileFromSerializedFlowResultSuccess
+    file_name: str
+    output_parameter_prefix: str
+
+
+class NodeExecutor:
+    """Singleton executor that executes nodes dynamically."""
+
+    def get_workflow_handler(self, library_name: str) -> LibraryManager.RegisteredEventHandler:
+        """Get the PublishWorkflowRequest handler for a library, or None if not available."""
+        library_manager = GriptapeNodes.LibraryManager()
+        registered_handlers = library_manager.get_registered_event_handlers(PublishWorkflowRequest)
+        if library_name in registered_handlers:
+            return registered_handlers[library_name]
+        msg = f"Could not find PublishWorkflowRequest handler for library {library_name}"
+        raise ValueError(msg)
+
+    async def execute(self, node: BaseNode) -> None:
+        """Execute the given node.
+
+        Args:
+            node: The BaseNode to execute
+            library_name: The library that the execute method should come from.
+        """
+        execution_type = node.get_parameter_value(node.execution_environment.name)
+        if execution_type == LOCAL_EXECUTION:
+            await node.aprocess()
+        elif execution_type == PRIVATE_EXECUTION:
+            await self._execute_private_workflow(node)
+        else:
+            await self._execute_library_workflow(node, execution_type)
+
+    async def _execute_and_apply_workflow(
+        self,
+        node: BaseNode,
+        workflow_path: Path,
+        file_name: str,
+        output_parameter_prefix: str,
+    ) -> None:
+        """Execute workflow in subprocess and apply results to node.
+
+        Args:
+            node: The node to apply results to
+            workflow_path: Path to workflow file to execute
+            file_name: Name of workflow for logging
+            output_parameter_prefix: Prefix for output parameters
+        """
+        my_subprocess_result = await self._execute_subprocess(workflow_path, file_name)
+        parameter_output_values = self._extract_parameter_output_values(my_subprocess_result)
+        self._apply_parameter_values_to_node(node, parameter_output_values, output_parameter_prefix)
+
+    async def _execute_private_workflow(self, node: BaseNode) -> None:
+        """Execute node in private subprocess environment.
+
+        Args:
+            node: The node to execute
+        """
+        workflow_result = None
+        try:
+            result = await self._publish_local_workflow(node)
+            workflow_result = result.workflow_result
+        except Exception as e:
+            logger.exception(
+                "Failed to publish local workflow for node '%s'. Node type: %s",
+                node.name,
+                node.__class__.__name__,
+            )
+            msg = f"Failed to publish workflow for node '{node.name}': {e}"
+            raise RuntimeError(msg) from e
+
+        try:
+            await self._execute_and_apply_workflow(
+                node, Path(workflow_result.file_path), result.file_name, result.output_parameter_prefix
+            )
+        except RuntimeError:
+            raise
+        except Exception as e:
+            logger.exception(
+                "Subprocess execution failed for node '%s'. Node type: %s",
+                node.name,
+                node.__class__.__name__,
+            )
+            msg = f"Failed to execute node '{node.name}' in local subprocess: {e}"
+            raise RuntimeError(msg) from e
+        finally:
+            if workflow_result is not None:
+                await self._delete_workflow(
+                    workflow_result.workflow_metadata.name, workflow_path=Path(workflow_result.file_path)
+                )
+
+    async def _execute_library_workflow(self, node: BaseNode, execution_type: str) -> None:
+        """Execute node via library handler.
+
+        Args:
+            node: The node to execute
+            execution_type: Library name for execution
+        """
+        try:
+            library = LibraryRegistry.get_library(name=execution_type)
+        except KeyError:
+            msg = f"Could not find library for execution environment {execution_type} for node {node.name}."
+            raise RuntimeError(msg)  # noqa: B904
+
+        library_name = library.get_library_data().name
+
+        try:
+            self.get_workflow_handler(library_name)
+        except ValueError as e:
+            logger.error("Library execution failed for node '%s' via library '%s': %s", node.name, library_name, e)
+            msg = f"Failed to execute node '{node.name}' via library '{library_name}': {e}"
+            raise RuntimeError(msg) from e
+
+        workflow_result = None
+        published_workflow_filename = None
+
+        try:
+            result = await self._publish_local_workflow(node, library=library)
+            workflow_result = result.workflow_result
+        except Exception as e:
+            logger.exception(
+                "Failed to publish local workflow for node '%s' via library '%s'. Node type: %s",
+                node.name,
+                library_name,
+                node.__class__.__name__,
+            )
+            msg = f"Failed to publish workflow for node '{node.name}' via library '{library_name}': {e}"
+            raise RuntimeError(msg) from e
+
+        try:
+            published_workflow_filename = await self._publish_library_workflow(
+                workflow_result, library_name, result.file_name
+            )
+        except Exception as e:
+            logger.exception(
+                "Failed to publish library workflow for node '%s' via library '%s'. Node type: %s",
+                node.name,
+                library_name,
+                node.__class__.__name__,
+            )
+            msg = f"Failed to publish library workflow for node '{node.name}' via library '{library_name}': {e}"
+            raise RuntimeError(msg) from e
+
+        try:
+            await self._execute_and_apply_workflow(
+                node, published_workflow_filename, result.file_name, result.output_parameter_prefix
+            )
+        except RuntimeError:
+            raise
+        except Exception as e:
+            logger.exception(
+                "Subprocess execution failed for node '%s' via library '%s'. Node type: %s",
+                node.name,
+                library_name,
+                node.__class__.__name__,
+            )
+            msg = f"Failed to execute node '{node.name}' via library '{library_name}': {e}"
+            raise RuntimeError(msg) from e
+        finally:
+            if workflow_result is not None:
+                await self._delete_workflow(
+                    workflow_name=workflow_result.workflow_metadata.name, workflow_path=Path(workflow_result.file_path)
+                )
+            if published_workflow_filename is not None:
+                published_filename = published_workflow_filename.stem
+                await self._delete_workflow(workflow_name=published_filename, workflow_path=published_workflow_filename)
+
+    async def _publish_local_workflow(
+        self, node: BaseNode, library: Library | None = None
+    ) -> PublishLocalWorkflowResult:
+        """Package and publish a workflow for subprocess execution.
+
+        Returns:
+            PublishLocalWorkflowResult containing workflow_result, file_name, and output_parameter_prefix
+        """
+        sanitized_node_name = node.name.replace(" ", "_")
+        output_parameter_prefix = f"{sanitized_node_name}_packaged_node_"
+        # We have to make our defaults strings because the PackageNodeAsSerializedFlowRequest doesn't accept None types.
+        library_name = "Griptape Nodes Library"
+        start_node_type = "StartFlow"
+        end_node_type = "EndFlow"
+        if library is not None:
+            start_nodes = library.get_nodes_by_base_type(StartNode)
+            end_nodes = library.get_nodes_by_base_type(EndNode)
+            if len(start_nodes) > 0 and len(end_nodes) > 0:
+                start_node_type = start_nodes[0]
+                end_node_type = end_nodes[0]
+            library_name = library.get_library_data().name
+        sanitized_library_name = library_name.replace(" ", "_")
+        request = PackageNodeAsSerializedFlowRequest(
+            node_name=node.name,
+            start_node_type=start_node_type,
+            end_node_type=end_node_type,
+            start_end_specific_library_name=library_name,
+            entry_control_parameter_name=node._entry_control_parameter.name
+            if node._entry_control_parameter is not None
+            else None,
+            output_parameter_prefix=output_parameter_prefix,
+        )
+
+        package_result = GriptapeNodes.handle_request(request)
+        if not isinstance(package_result, PackageNodeAsSerializedFlowResultSuccess):
+            msg = f"Failed to package node '{node.name}'. Error: {package_result.result_details}"
+            raise RuntimeError(msg)  # noqa: TRY004
+
+        file_name = f"{sanitized_node_name}_{sanitized_library_name}_packaged_flow"
+        workflow_file_request = SaveWorkflowFileFromSerializedFlowRequest(
+            file_name=file_name,
+            serialized_flow_commands=package_result.serialized_flow_commands,
+            workflow_shape=package_result.workflow_shape,
+            pickle_control_flow_result=True,
+        )
+
+        workflow_result = GriptapeNodes.handle_request(workflow_file_request)
+        if not isinstance(workflow_result, SaveWorkflowFileFromSerializedFlowResultSuccess):
+            msg = f"Failed to Save Workflow File from Serialized Flow for node '{node.name}'. Error: {package_result.result_details}"
+            raise RuntimeError(msg)  # noqa: TRY004
+
+        return PublishLocalWorkflowResult(
+            workflow_result=workflow_result, file_name=file_name, output_parameter_prefix=output_parameter_prefix
+        )
+
+    async def _publish_library_workflow(
+        self, workflow_result: SaveWorkflowFileFromSerializedFlowResultSuccess, library_name: str, file_name: str
+    ) -> Path:
+        subprocess_workflow_publisher = SubprocessWorkflowPublisher()
+        published_filename = f"{Path(workflow_result.file_path).stem}_published"
+        published_workflow_filename = GriptapeNodes.ConfigManager().workspace_path / (published_filename + ".py")
+
+        await subprocess_workflow_publisher.arun(
+            workflow_name=file_name,
+            workflow_path=workflow_result.file_path,
+            publisher_name=library_name,
+            published_workflow_file_name=published_filename,
+            pickle_control_flow_result=True,
+        )
+
+        if not published_workflow_filename.exists():
+            msg = f"Published workflow file does not exist at path: {published_workflow_filename}"
+            raise FileNotFoundError(msg)
+
+        return published_workflow_filename
+
+    async def _execute_subprocess(
+        self,
+        published_workflow_filename: Path,
+        file_name: str,
+        pickle_control_flow_result: bool = True,  # noqa: FBT001, FBT002
+    ) -> dict[str, dict[str | SerializedNodeCommands.UniqueParameterValueUUID, Any] | None]:
+        """Execute the published workflow in a subprocess.
+
+        Args:
+            published_workflow_filename: Path to the workflow file to execute
+            file_name: Name of the workflow for logging
+            pickle_control_flow_result: Whether to pickle control flow results (defaults to True)
+
+        Returns:
+            The subprocess execution output dictionary
+        """
+        from griptape_nodes.bootstrap.workflow_executors.subprocess_workflow_executor import (
+            SubprocessWorkflowExecutor,
+        )
+
+        subprocess_executor = SubprocessWorkflowExecutor(workflow_path=str(published_workflow_filename))
+
+        try:
+            async with subprocess_executor as executor:
+                await executor.arun(
+                    workflow_name=file_name,
+                    flow_input={},
+                    storage_backend=await self._get_storage_backend(),
+                    pickle_control_flow_result=pickle_control_flow_result,
+                )
+        except RuntimeError as e:
+            # Subprocess returned non-zero exit code
+            logger.error(
+                "Subprocess execution failed for workflow '%s' at path '%s'. Error: %s",
+                file_name,
+                published_workflow_filename,
+                e,
+            )
+            raise
+
+        my_subprocess_result = subprocess_executor.output
+        if my_subprocess_result is None:
+            msg = f"Subprocess completed but returned no output for workflow '{file_name}'"
+            logger.error(msg)
+            raise ValueError(msg)
+        return my_subprocess_result
+
+    def _extract_parameter_output_values(
+        self, subprocess_result: dict[str, dict[str | SerializedNodeCommands.UniqueParameterValueUUID, Any] | None]
+    ) -> dict[str, Any]:
+        """Extract and deserialize parameter output values from subprocess result.
+
+        Returns:
+            Dictionary of parameter names to their deserialized values
+        """
+        parameter_output_values = {}
+        for result_dict in subprocess_result.values():
+            # Handle backward compatibility: old flat structure
+            if not isinstance(result_dict, dict) or "parameter_output_values" not in result_dict:
+                parameter_output_values.update(result_dict)  # type: ignore[arg-type]
+                continue
+
+            param_output_vals = result_dict["parameter_output_values"]
+            unique_uuid_to_values = result_dict.get("unique_parameter_uuid_to_values")
+
+            # No UUID mapping - use values directly
+            if not unique_uuid_to_values:
+                parameter_output_values.update(param_output_vals)
+                continue
+
+            # Deserialize UUID-referenced values
+            for param_name, param_value in param_output_vals.items():
+                parameter_output_values[param_name] = self._deserialize_parameter_value(
+                    param_name, param_value, unique_uuid_to_values
+                )
+        return parameter_output_values
+
+    def _deserialize_parameter_value(self, param_name: str, param_value: Any, unique_uuid_to_values: dict) -> Any:
+        """Deserialize a single parameter value, handling UUID references and pickling.
+
+        Args:
+            param_name: Parameter name for logging
+            param_value: Either a direct value or UUID reference
+            unique_uuid_to_values: Mapping of UUIDs to pickled values
+
+        Returns:
+            Deserialized parameter value
+        """
+        # Direct value (not a UUID reference)
+        if param_value not in unique_uuid_to_values:
+            return param_value
+
+        stored_value = unique_uuid_to_values[param_value]
+
+        # Non-string stored values are used directly
+        if not isinstance(stored_value, str):
+            return stored_value
+
+        # Attempt to unpickle string-represented bytes
+        try:
+            actual_bytes = ast.literal_eval(stored_value)
+            if isinstance(actual_bytes, bytes):
+                return pickle.loads(actual_bytes)  # noqa: S301
+        except (ValueError, SyntaxError, pickle.UnpicklingError) as e:
+            logger.warning(
+                "Failed to unpickle string-represented bytes for parameter '%s': %s",
+                param_name,
+                e,
+            )
+            return stored_value
+        return stored_value
+
+    def _apply_parameter_values_to_node(
+        self, node: BaseNode, parameter_output_values: dict[str, Any], output_parameter_prefix: str
+    ) -> None:
+        """Apply deserialized parameter values back to the node.
+
+        Sets parameter values on the node and updates parameter_output_values dictionary.
+        """
+        # If the packaged flow fails, the End Flow Node in the library published workflow will have entered from 'failed'. That means that running the node failed, but was caught by the published flow.
+        # In this case, we should fail the node, since it didn't complete properly.
+        if "failed" in parameter_output_values and parameter_output_values["failed"] == CONTROL_INPUT_PARAMETER:
+            msg = f"Failed to execute node: {node.name}, with exception: {parameter_output_values.get('result_details', 'No result details were returned.')}"
+            raise RuntimeError(msg)
+        for param_name, param_value in parameter_output_values.items():
+            # We are grabbing all of the parameters on our end nodes that align with the node being published.
+            if param_name.startswith(output_parameter_prefix):
+                clean_param_name = param_name[len(output_parameter_prefix) :]
+                # If the parameter exists on the node, then we need to set those values on the node.
+                parameter = node.get_parameter_by_name(clean_param_name)
+                # Don't set execution_environment, since that will be set to Local Execution on any published flow.
+                if parameter is None:
+                    msg = (
+                        "Parameter '%s' from parameter output values not found on node '%s'",
+                        clean_param_name,
+                        node.name,
+                    )
+                    logger.error(msg)
+                    raise RuntimeError(msg)
+                if parameter != node.execution_environment:
+                    if parameter.type != ParameterTypeBuiltin.CONTROL_TYPE:
+                        # If the node is control type, only set its value in parameter_output_values.
+                        node.set_parameter_value(clean_param_name, param_value)
+                    node.parameter_output_values[clean_param_name] = param_value
+
+    async def _delete_workflow(self, workflow_name: str, workflow_path: Path) -> None:
+        try:
+            WorkflowRegistry.get_workflow_by_name(workflow_name)
+        except KeyError:
+            # Register the workflow if not already registered since a subprocess may have created it
+            load_workflow_metadata_request = LoadWorkflowMetadata(file_name=workflow_path.name)
+            result = GriptapeNodes.handle_request(load_workflow_metadata_request)
+            if isinstance(result, LoadWorkflowMetadataResultSuccess):
+                WorkflowRegistry.generate_new_workflow(str(workflow_path), result.metadata)
+
+        delete_request = DeleteWorkflowRequest(name=workflow_name)
+        delete_result = GriptapeNodes.handle_request(delete_request)
+        if isinstance(delete_result, DeleteWorkflowResultFailure):
+            logger.error(
+                "Failed to delete workflow '%s'. Error: %s",
+                workflow_name,
+                delete_result.result_details,
+            )
+        else:
+            logger.info(
+                "Cleanup result for workflow '%s': %s",
+                workflow_name,
+                delete_result.result_details,
+            )
+
+    async def _get_storage_backend(self) -> StorageBackend:
+        storage_backend_str = GriptapeNodes.ConfigManager().get_config_value("storage_backend")
+        # Convert string to StorageBackend enum
+        try:
+            storage_backend = StorageBackend(storage_backend_str)
+        except ValueError:
+            storage_backend = StorageBackend.LOCAL
+        return storage_backend
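The _deserialize_parameter_value helper assumes some subprocess outputs arrive as the string representation of pickled bytes (for example, something like str(pickle.dumps(value)) produced on the other side of the process boundary; that producer is an assumption here). ast.literal_eval turns that string back into bytes, and pickle.loads recovers the object. A standalone sketch of that round trip, independent of the executor classes:

    import ast
    import pickle

    original = {"samples": [1, 2, 3], "label": "demo"}

    # What a subprocess result might look like once a pickled payload has been
    # stringified for transport (illustrative assumption).
    stringified = str(pickle.dumps(original))

    # Recovery path mirrored from _deserialize_parameter_value: literal_eval -> bytes -> unpickle.
    recovered_bytes = ast.literal_eval(stringified)
    assert isinstance(recovered_bytes, bytes)
    recovered = pickle.loads(recovered_bytes)  # noqa: S301 - trusted, locally produced payload
    assert recovered == original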
griptape_nodes/drivers/storage/base_storage_driver.py
@@ -27,17 +27,6 @@ class BaseStorageDriver(ABC):
         """
         self.workspace_directory = workspace_directory
 
-    def _get_full_path(self, path: Path) -> Path:
-        """Get the full path by joining workspace directory with the given path.
-
-        Args:
-            path: The relative path to join with workspace directory.
-
-        Returns:
-            The full path as workspace_directory / path.
-        """
-        return self.workspace_directory / path
-
 
     @abstractmethod
     def create_signed_upload_url(self, path: Path) -> CreateSignedUploadUrlResponse:
griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py
@@ -19,7 +19,6 @@ class GriptapeCloudStorageDriver(BaseStorageDriver):
         *,
         bucket_id: str,
         api_key: str | None = None,
-        static_files_directory: str | None = None,
         **kwargs,
     ) -> None:
         """Initialize the GriptapeCloudStorageDriver.
@@ -38,26 +37,11 @@ class GriptapeCloudStorageDriver(BaseStorageDriver):
         self.headers = kwargs.get("headers") or {"Authorization": f"Bearer {self.api_key}"}
 
         self.bucket_id = bucket_id
-        self.static_files_directory = static_files_directory
-
-    def _get_full_file_path(self, path: Path) -> str:
-        """Get the full file path including workspace directory and static files directory prefix.
-
-        Args:
-            path: The relative path from the workspace directory.
-
-        Returns:
-            The full file path with static files directory prefix if configured.
-        """
-        if self.static_files_directory:
-            return f"{self.static_files_directory}/{path}"
-        return str(path)
 
     def create_signed_upload_url(self, path: Path) -> CreateSignedUploadUrlResponse:
-        full_file_path = self._get_full_file_path(path)
-        self._create_asset(full_file_path)
+        self._create_asset(path.as_posix())
 
-        url = urljoin(self.base_url, f"/api/buckets/{self.bucket_id}/asset-urls/{full_file_path}")
+        url = urljoin(self.base_url, f"/api/buckets/{self.bucket_id}/asset-urls/{path.as_posix()}")
         try:
             response = httpx.post(url, json={"operation": "PUT"}, headers=self.headers)
             response.raise_for_status()
@@ -71,8 +55,7 @@ class GriptapeCloudStorageDriver(BaseStorageDriver):
         return {"url": response_data["url"], "headers": response_data.get("headers", {}), "method": "PUT"}
 
     def create_signed_download_url(self, path: Path) -> str:
-        full_file_path = self._get_full_file_path(path)
-        url = urljoin(self.base_url, f"/api/buckets/{self.bucket_id}/asset-urls/{full_file_path}")
+        url = urljoin(self.base_url, f"/api/buckets/{self.bucket_id}/asset-urls/{path.as_posix()}")
         try:
             response = httpx.post(url, json={"method": "GET"}, headers=self.headers)
             response.raise_for_status()
@@ -141,7 +124,7 @@ class GriptapeCloudStorageDriver(BaseStorageDriver):
         """
         url = urljoin(self.base_url, f"/api/buckets/{self.bucket_id}/assets")
         try:
-            response = httpx.get(url, headers=self.headers, params={"prefix": self.static_files_directory or ""})
+            response = httpx.get(url, headers=self.headers, params={"prefix": self.workspace_directory.name or ""})
             response.raise_for_status()
         except httpx.HTTPStatusError as e:
             msg = f"Failed to list files in bucket {self.bucket_id}: {e}"
@@ -155,8 +138,8 @@ class GriptapeCloudStorageDriver(BaseStorageDriver):
         for asset in assets:
             name = asset.get("name", "")
             # Remove the static files directory prefix if it exists
-            if self.static_files_directory and name.startswith(f"{self.static_files_directory}/"):
-                name = name[len(f"{self.static_files_directory}/") :]
+            if self.workspace_directory and name.startswith(f"{self.workspace_directory.name}/"):
+                name = name[len(f"{self.workspace_directory.name}/") :]
             file_names.append(name)
 
         return file_names
@@ -191,8 +174,7 @@ class GriptapeCloudStorageDriver(BaseStorageDriver):
         Args:
            path: The path of the file to delete.
         """
-        full_file_path = self._get_full_file_path(path)
-        url = urljoin(self.base_url, f"/api/buckets/{self.bucket_id}/assets/{full_file_path}")
+        url = urljoin(self.base_url, f"/api/buckets/{self.bucket_id}/assets/{path.as_posix()}")
 
         try:
             response = httpx.delete(url, headers=self.headers)
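Across these hunks the driver stops tracking a separate static_files_directory and derives asset keys from the path itself: URLs are built with path.as_posix(), and listing strips the workspace_directory.name prefix. A small sketch of that prefix-stripping logic in isolation (the workspace path and asset names here are made up):

    from pathlib import Path

    workspace_directory = Path("/home/user/GriptapeNodes")  # illustrative workspace
    asset_names = [
        f"{workspace_directory.name}/images/output.png",  # pretend asset returned by the bucket listing
        "unrelated/other.txt",
    ]

    file_names = []
    for name in asset_names:
        # Mirrors the diff: drop the "<workspace dir name>/" prefix when present.
        if workspace_directory and name.startswith(f"{workspace_directory.name}/"):
            name = name[len(f"{workspace_directory.name}/") :]
        file_names.append(name)

    assert file_names == ["images/output.png", "unrelated/other.txt"]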