griptape-nodes 0.46.0__py3-none-any.whl → 0.48.0__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
Files changed (25)
  1. griptape_nodes/app/app.py +1 -1
  2. griptape_nodes/exe_types/core_types.py +129 -10
  3. griptape_nodes/exe_types/node_types.py +9 -3
  4. griptape_nodes/machines/node_resolution.py +10 -8
  5. griptape_nodes/mcp_server/ws_request_manager.py +6 -6
  6. griptape_nodes/retained_mode/events/base_events.py +74 -1
  7. griptape_nodes/retained_mode/events/secrets_events.py +2 -0
  8. griptape_nodes/retained_mode/griptape_nodes.py +17 -13
  9. griptape_nodes/retained_mode/managers/agent_manager.py +8 -6
  10. griptape_nodes/retained_mode/managers/arbitrary_code_exec_manager.py +1 -1
  11. griptape_nodes/retained_mode/managers/config_manager.py +36 -45
  12. griptape_nodes/retained_mode/managers/flow_manager.py +98 -98
  13. griptape_nodes/retained_mode/managers/library_manager.py +57 -57
  14. griptape_nodes/retained_mode/managers/node_manager.py +121 -124
  15. griptape_nodes/retained_mode/managers/object_manager.py +9 -10
  16. griptape_nodes/retained_mode/managers/os_manager.py +31 -31
  17. griptape_nodes/retained_mode/managers/secrets_manager.py +5 -5
  18. griptape_nodes/retained_mode/managers/static_files_manager.py +19 -21
  19. griptape_nodes/retained_mode/managers/sync_manager.py +3 -2
  20. griptape_nodes/retained_mode/managers/workflow_manager.py +153 -174
  21. griptape_nodes/retained_mode/retained_mode.py +25 -47
  22. {griptape_nodes-0.46.0.dist-info → griptape_nodes-0.48.0.dist-info}/METADATA +1 -1
  23. {griptape_nodes-0.46.0.dist-info → griptape_nodes-0.48.0.dist-info}/RECORD +25 -25
  24. {griptape_nodes-0.46.0.dist-info → griptape_nodes-0.48.0.dist-info}/WHEEL +1 -1
  25. {griptape_nodes-0.46.0.dist-info → griptape_nodes-0.48.0.dist-info}/entry_points.txt +0 -0
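Nearly every hunk below applies the same pattern: failure result payloads that used to be constructed bare (for example RenameObjectResultFailure()) now also carry the message that was already being logged, passed as result_details=..., so callers receive the reason for a failure rather than just its type. As a minimal sketch of that shape only — the dataclass and handler below are illustrative stand-ins, not the actual ResultPayload classes from griptape_nodes:

    # Illustrative stand-in for the pattern added throughout this diff: the
    # failure payload carries the same human-readable text that gets logged.
    from dataclasses import dataclass

    @dataclass
    class RenameFailure:
        next_available_name: str | None
        result_details: str | None = None

    def rename(objects: dict[str, object], object_name: str) -> RenameFailure | None:
        if object_name not in objects:
            details = f"Attempted to rename object '{object_name}', but no object of that name could be found."
            # The caller now receives the same text that was logged.
            return RenameFailure(next_available_name=None, result_details=details)
        return None  # success path elided
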
griptape_nodes/retained_mode/managers/object_manager.py
@@ -55,7 +55,7 @@ class ObjectManager:
  if source_obj is None:
  details = f"Attempted to rename object '{request.object_name}', but no object of that name could be found."
  logger.error(details)
- return RenameObjectResultFailure(next_available_name=None)
+ return RenameObjectResultFailure(next_available_name=None, result_details=details)
 
  # Is there a collision?
  requested_name_obj = self.attempt_get_object_by_name(request.requested_name)
@@ -73,7 +73,7 @@ class ObjectManager:
  # Fail it but be nice and offer the next name that WOULD HAVE been available.
  details = f"Attempted to rename object '{request.object_name}' to '{request.requested_name}'. Failed because another object of that name exists. Next available name would have been '{next_name}'."
  logger.error(details)
- return RenameObjectResultFailure(next_available_name=next_name)
+ return RenameObjectResultFailure(next_available_name=next_name, result_details=details)
  # We'll use the next available name.
  final_name = next_name
 
@@ -86,7 +86,7 @@ class ObjectManager:
  case _:
  details = f"Attempted to rename an object named '{request.object_name}', but that object wasn't of a type supported for rename."
  logger.error(details)
- return RenameObjectResultFailure(next_available_name=None)
+ return RenameObjectResultFailure(next_available_name=None, result_details=details)
 
  # Update the object table.
  self._name_to_objects[final_name] = source_obj
@@ -102,10 +102,9 @@ class ObjectManager:
 
  def on_clear_all_object_state_request(self, request: ClearAllObjectStateRequest) -> ResultPayload: # noqa: C901
  if not request.i_know_what_im_doing:
- logger.warning(
- "Attempted to clear all object state and delete everything. Failed because they didn't know what they were doing."
- )
- return ClearAllObjectStateResultFailure()
+ details = "Attempted to clear all object state and delete everything. Failed because they didn't know what they were doing."
+ logger.warning(details)
+ return ClearAllObjectStateResultFailure(result_details=details)
  # Let's try and clear it all.
  # Cancel any running flows.
  flows = self.get_filtered_subset(type=ControlFlow)
@@ -115,7 +114,7 @@ class ObjectManager:
  if not result.succeeded():
  details = f"Attempted to clear all object state and delete everything. Failed because running flow '{flow_name}' could not cancel."
  logger.error(details)
- return ClearAllObjectStateResultFailure()
+ return ClearAllObjectStateResultFailure(result_details=details)
 
  try:
  # Reset global execution state first to eliminate all references before deletion
@@ -123,7 +122,7 @@ class ObjectManager:
  except Exception as e:
  details = f"Attempted to reset global execution state. Failed with exception: {e}"
  logger.error(details)
- return ClearAllObjectStateResultFailure()
+ return ClearAllObjectStateResultFailure(result_details=details)
 
  try:
  # Delete the existing flows, which will clear all nodes and connections.
@@ -131,7 +130,7 @@ class ObjectManager:
  except Exception as e:
  details = f"Attempted to clear all object state and delete everything. Failed with exception: {e}"
  logger.error(details)
- return ClearAllObjectStateResultFailure()
+ return ClearAllObjectStateResultFailure(result_details=details)
 
  # Clear the current context.
  context_mgr = GriptapeNodes.ContextManager()
griptape_nodes/retained_mode/managers/os_manager.py
@@ -218,17 +218,17 @@ class OSManager:
  sys.stdout.flush() # Recommended here https://docs.python.org/3/library/os.html#os.execvpe
  os.execvp(args[0], args) # noqa: S606
 
- def on_open_associated_file_request(self, request: OpenAssociatedFileRequest) -> ResultPayload: # noqa: C901, PLR0911, PLR0912
+ def on_open_associated_file_request(self, request: OpenAssociatedFileRequest) -> ResultPayload: # noqa: PLR0911, PLR0912, PLR0915, C901
  # Validate that exactly one of path_to_file or file_entry is provided
  if request.path_to_file is None and request.file_entry is None:
  msg = "Either path_to_file or file_entry must be provided"
  logger.error(msg)
- return OpenAssociatedFileResultFailure()
+ return OpenAssociatedFileResultFailure(result_details=msg)
 
  if request.path_to_file is not None and request.file_entry is not None:
  msg = "Only one of path_to_file or file_entry should be provided, not both"
  logger.error(msg)
- return OpenAssociatedFileResultFailure()
+ return OpenAssociatedFileResultFailure(result_details=msg)
 
  # Get the file path to open
  if request.file_entry is not None:
@@ -241,13 +241,13 @@ class OSManager:
  # This should never happen due to validation above, but type checker needs it
  msg = "No valid file path provided"
  logger.error(msg)
- return OpenAssociatedFileResultFailure()
+ return OpenAssociatedFileResultFailure(result_details=msg)
 
  # At this point, file_path_str is guaranteed to be a string
  if file_path_str is None:
  msg = "No valid file path provided"
  logger.error(msg)
- return OpenAssociatedFileResultFailure()
+ return OpenAssociatedFileResultFailure(result_details=msg)
 
  # Sanitize and validate the path (file or directory)
  try:
@@ -256,12 +256,12 @@ class OSManager:
  except (ValueError, RuntimeError):
  details = f"Invalid file path: '{file_path_str}'"
  logger.info(details)
- return OpenAssociatedFileResultFailure()
+ return OpenAssociatedFileResultFailure(result_details=details)
 
  if not path.exists():
  details = f"Path does not exist: '{path}'"
  logger.info(details)
- return OpenAssociatedFileResultFailure()
+ return OpenAssociatedFileResultFailure(result_details=details)
 
  logger.info("Attempting to open path: %s on platform: %s", path, sys.platform)
 
@@ -288,8 +288,9 @@ class OSManager:
 
  xdg_path = next((p for p in xdg_paths if Path(p).exists()), None)
  if not xdg_path:
- logger.info("xdg-open not found in standard locations")
- return OpenAssociatedFileResultFailure()
+ details = "xdg-open not found in standard locations"
+ logger.info(details)
+ return OpenAssociatedFileResultFailure(result_details=details)
 
  subprocess.run( # noqa: S603
  [xdg_path, str(path)],
@@ -301,20 +302,19 @@ class OSManager:
  else:
  details = f"Unsupported platform: '{platform_name}'"
  logger.info(details)
- return OpenAssociatedFileResultFailure()
+ return OpenAssociatedFileResultFailure(result_details=details)
 
  return OpenAssociatedFileResultSuccess()
  except subprocess.CalledProcessError as e:
- logger.error(
- "Process error when opening file: return code=%s, stdout=%s, stderr=%s",
- e.returncode,
- e.stdout,
- e.stderr,
+ details = (
+ f"Process error when opening file: return code={e.returncode}, stdout={e.stdout}, stderr={e.stderr}"
  )
- return OpenAssociatedFileResultFailure()
+ logger.error(details)
+ return OpenAssociatedFileResultFailure(result_details=details)
  except Exception as e:
- logger.error("Exception occurred when trying to open path: %s", type(e).__name__)
- return OpenAssociatedFileResultFailure()
+ details = f"Exception occurred when trying to open path: {e}"
+ logger.error(details)
+ return OpenAssociatedFileResultFailure(result_details=details)
 
  def _detect_mime_type(self, file_path: Path) -> str | None:
  """Detect MIME type for a file. Returns None for directories or if detection fails."""
@@ -349,18 +349,18 @@ class OSManager:
  if not directory.exists():
  msg = f"Directory does not exist: {directory}"
  logger.error(msg)
- return ListDirectoryResultFailure()
+ return ListDirectoryResultFailure(result_details=msg)
  if not directory.is_dir():
  msg = f"Directory is not a directory: {directory}"
  logger.error(msg)
- return ListDirectoryResultFailure()
+ return ListDirectoryResultFailure(result_details=msg)
 
  # Check workspace constraints
  is_workspace_path, relative_or_abs_path = self._validate_workspace_path(directory)
  if request.workspace_only and not is_workspace_path:
  msg = f"Directory is outside workspace: {directory}"
  logger.error(msg)
- return ListDirectoryResultFailure()
+ return ListDirectoryResultFailure(result_details=msg)
 
  entries = []
  try:
@@ -393,7 +393,7 @@ class OSManager:
  except (OSError, PermissionError) as e:
  msg = f"Error listing directory {directory}: {e}"
  logger.error(msg)
- return ListDirectoryResultFailure()
+ return ListDirectoryResultFailure(result_details=msg)
 
  # Return appropriate path format based on mode
  if request.workspace_only:
@@ -409,7 +409,7 @@ class OSManager:
  except Exception as e:
  msg = f"Unexpected error in list_directory: {type(e).__name__}: {e}"
  logger.error(msg)
- return ListDirectoryResultFailure()
+ return ListDirectoryResultFailure(result_details=msg)
 
  def on_read_file_request(self, request: ReadFileRequest) -> ResultPayload:
  """Handle a request to read file contents with automatic text/binary detection."""
@@ -436,7 +436,7 @@ class OSManager:
  file_info = f" for file: {file_path}" if file_path is not None else ""
  msg = f"Validation error in read_file{file_info}: {e}"
  logger.error(msg)
- return ReadFileResultFailure()
+ return ReadFileResultFailure(result_details=msg)
  except Exception as e:
  # Try to include file path in error message if available
  path_info = ""
@@ -447,7 +447,7 @@ class OSManager:
 
  msg = f"Unexpected error in read_file{path_info}: {type(e).__name__}: {e}"
  logger.error(msg)
- return ReadFileResultFailure()
+ return ReadFileResultFailure(result_details=msg)
 
  def _read_file_content(
  self, file_path: Path, request: ReadFileRequest
@@ -748,7 +748,7 @@ class OSManager:
  if request.workspace_only and is_absolute:
  msg = f"Absolute path is outside workspace: {full_path_str}"
  logger.error(msg)
- return CreateFileResultFailure()
+ return CreateFileResultFailure(result_details=msg)
 
  # Resolve path - if absolute, use as-is; if relative, align to workspace
  if is_absolute:
@@ -783,7 +783,7 @@ class OSManager:
  path_info = request.get_full_path() if hasattr(request, "get_full_path") else str(request.path)
  msg = f"Failed to create {'directory' if request.is_directory else 'file'} at {path_info}: {e}"
  logger.error(msg)
- return CreateFileResultFailure()
+ return CreateFileResultFailure(result_details=msg)
 
  def on_rename_file_request(self, request: RenameFileRequest) -> ResultPayload:
  """Handle a request to rename a file or directory."""
@@ -798,13 +798,13 @@ class OSManager:
  if not old_path.exists():
  msg = f"Source path does not exist: {old_path}"
  logger.error(msg)
- return RenameFileResultFailure()
+ return RenameFileResultFailure(result_details=msg)
 
  # Check if new path already exists
  if new_path.exists():
  msg = f"Destination path already exists: {new_path}"
  logger.error(msg)
- return RenameFileResultFailure()
+ return RenameFileResultFailure(result_details=msg)
 
  # Check workspace constraints for both paths
  is_old_in_workspace, _ = self._validate_workspace_path(old_path)
@@ -813,7 +813,7 @@ class OSManager:
  if request.workspace_only and (not is_old_in_workspace or not is_new_in_workspace):
  msg = f"One or both paths are outside workspace: {old_path} -> {new_path}"
  logger.error(msg)
- return RenameFileResultFailure()
+ return RenameFileResultFailure(result_details=msg)
 
  # Create parent directories for new path if needed
  new_path.parent.mkdir(parents=True, exist_ok=True)
@@ -827,4 +827,4 @@ class OSManager:
  except Exception as e:
  msg = f"Failed to rename {request.old_path} to {request.new_path}: {e}"
  logger.error(msg)
- return RenameFileResultFailure()
+ return RenameFileResultFailure(result_details=msg)
griptape_nodes/retained_mode/managers/secrets_manager.py
@@ -53,12 +53,12 @@ class SecretsManager:
 
  def on_handle_get_secret_request(self, request: GetSecretValueRequest) -> ResultPayload:
  secret_key = SecretsManager._apply_secret_name_compliance(request.key)
- secret_value = self.get_secret(secret_key)
+ secret_value = self.get_secret(secret_key, should_error_on_not_found=request.should_error_on_not_found)
 
- if secret_value is None:
+ if secret_value is None and request.should_error_on_not_found:
  details = f"Secret '{secret_key}' not found."
  logger.error(details)
- return GetSecretValueResultFailure()
+ return GetSecretValueResultFailure(result_details=details)
 
  return GetSecretValueResultSuccess(value=secret_value)
 
@@ -90,12 +90,12 @@ class SecretsManager:
  if not ENV_VAR_PATH.exists():
  details = f"Secret file does not exist: '{ENV_VAR_PATH}'"
  logger.error(details)
- return DeleteSecretValueResultFailure()
+ return DeleteSecretValueResultFailure(result_details=details)
 
  if get_key(ENV_VAR_PATH, secret_name) is None:
  details = f"Secret {secret_name} not found in {ENV_VAR_PATH}"
  logger.error(details)
- return DeleteSecretValueResultFailure()
+ return DeleteSecretValueResultFailure(result_details=details)
 
  unset_key(ENV_VAR_PATH, secret_name)
 
griptape_nodes/retained_mode/managers/static_files_manager.py
@@ -50,21 +50,22 @@ class StaticFilesManager:
 
  match storage_backend:
  case StorageBackend.GTC:
- bucket_id = secrets_manager.get_secret("GT_CLOUD_BUCKET_ID")
+ bucket_id = secrets_manager.get_secret("GT_CLOUD_BUCKET_ID", should_error_on_not_found=False)
 
  if not bucket_id:
- msg = "GT_CLOUD_BUCKET_ID secret is required for gtc storage backend"
- logger.error(msg)
- raise ValueError(msg)
-
- static_files_directory = config_manager.get_config_value(
- "static_files_directory", default="staticfiles"
- )
- self.storage_driver = GriptapeCloudStorageDriver(
- bucket_id=bucket_id,
- api_key=secrets_manager.get_secret("GT_CLOUD_API_KEY"),
- static_files_directory=static_files_directory,
- )
+ logger.warning(
+ "GT_CLOUD_BUCKET_ID secret is not available, falling back to local storage. Run `gtn init` to set it up."
+ )
+ self.storage_driver = LocalStorageDriver()
+ else:
+ static_files_directory = config_manager.get_config_value(
+ "static_files_directory", default="staticfiles"
+ )
+ self.storage_driver = GriptapeCloudStorageDriver(
+ bucket_id=bucket_id,
+ api_key=secrets_manager.get_secret("GT_CLOUD_API_KEY"),
+ static_files_directory=static_files_directory,
+ )
  case StorageBackend.LOCAL:
  self.storage_driver = LocalStorageDriver()
  case _:
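Two changes meet in the hunk above: SecretsManager.get_secret can now be told not to treat a missing key as an error (see the SecretsManager hunk earlier in this diff), and StaticFilesManager uses that to fall back to local storage instead of raising when GT_CLOUD_BUCKET_ID is unset. A rough sketch of the resulting selection logic under simplified assumptions — the dict-backed lookup and the returned strings are stand-ins, not the real secret store or driver classes:

    # Hedged sketch of the new fallback; `env` stands in for the secret store
    # and the returned strings stand in for the storage driver classes named
    # in the diff above.
    def get_secret(env: dict[str, str], key: str) -> str | None:
        # With should_error_on_not_found=False, a missing secret is simply
        # reported as absent instead of being treated as an error by the caller.
        return env.get(key)

    def pick_storage_driver(env: dict[str, str]) -> str:
        bucket_id = get_secret(env, "GT_CLOUD_BUCKET_ID")
        if not bucket_id:
            return "LocalStorageDriver"  # new fallback; previously this path raised ValueError
        return "GriptapeCloudStorageDriver"

    assert pick_storage_driver({}) == "LocalStorageDriver"
    assert pick_storage_driver({"GT_CLOUD_BUCKET_ID": "bucket-123"}) == "GriptapeCloudStorageDriver"
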
@@ -93,14 +94,14 @@ class StaticFilesManager:
  except (binascii.Error, ValueError) as e:
  msg = f"Failed to decode base64 content for file {file_name}: {e}"
  logger.error(msg)
- return CreateStaticFileResultFailure(error=msg)
+ return CreateStaticFileResultFailure(error=msg, result_details=msg)
 
  try:
  url = self.save_static_file(content_bytes, file_name)
  except ValueError as e:
  msg = f"Failed to create static file for file {file_name}: {e}"
  logger.error(msg)
- return CreateStaticFileResultFailure(error=msg)
+ return CreateStaticFileResultFailure(error=msg, result_details=msg)
 
  return CreateStaticFileResultSuccess(url=url)
 
@@ -122,7 +123,7 @@ class StaticFilesManager:
  except ValueError as e:
  msg = f"Failed to create presigned URL for file {file_name}: {e}"
  logger.error(msg)
- return CreateStaticFileUploadUrlResultFailure(error=msg)
+ return CreateStaticFileUploadUrlResultFailure(error=msg, result_details=msg)
 
  return CreateStaticFileUploadUrlResultSuccess(
  url=response["url"], headers=response["headers"], method=response["method"]
@@ -146,7 +147,7 @@ class StaticFilesManager:
  except ValueError as e:
  msg = f"Failed to create presigned URL for file {file_name}: {e}"
  logger.error(msg)
- return CreateStaticFileDownloadUrlResultFailure(error=msg)
+ return CreateStaticFileDownloadUrlResultFailure(error=msg, result_details=msg)
 
  return CreateStaticFileDownloadUrlResultSuccess(url=url)
 
@@ -166,10 +167,7 @@ class StaticFilesManager:
 
  try:
  response = httpx.request(
- response["method"],
- response["url"],
- content=data,
- headers=response["headers"],
+ response["method"], response["url"], content=data, headers=response["headers"], timeout=60
  )
  response.raise_for_status()
  except httpx.HTTPStatusError as e:
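The last hunk above also passes an explicit timeout=60 to the presigned-URL upload; without it, httpx applies its 5-second default, which is easy to exceed on larger uploads. As a hedged aside, httpx also accepts per-phase limits via httpx.Timeout — the helper, values, and parameters below are illustrative, not taken from this package:

    # Illustrative only: an upload helper with a generous overall budget but a
    # fast connect deadline, using httpx.Timeout instead of a single number.
    import httpx

    def upload(url: str, data: bytes, headers: dict[str, str]) -> None:
        limits = httpx.Timeout(60.0, connect=10.0)  # 60s overall, 10s to establish the connection
        response = httpx.request("PUT", url, content=data, headers=headers, timeout=limits)
        response.raise_for_status()
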
griptape_nodes/retained_mode/managers/sync_manager.py
@@ -144,8 +144,9 @@ class SyncManager:
  self._active_sync_tasks[sync_task_id] = sync_thread
  sync_thread.start()
  except Exception as e:
- logger.error("Failed to start cloud workflow sync: %s", str(e))
- return StartSyncAllCloudWorkflowsResultFailure()
+ details = f"Failed to start cloud workflow sync: {e!s}"
+ logger.error(details)
+ return StartSyncAllCloudWorkflowsResultFailure(result_details=details)
  else:
  logger.info("Started background sync for %d workflow files", len(workflow_files))
  return StartSyncAllCloudWorkflowsResultSuccess(