griptape-nodes 0.71.0__py3-none-any.whl → 0.72.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. griptape_nodes/app/app.py +4 -0
  2. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +10 -1
  3. griptape_nodes/bootstrap/workflow_executors/utils/subprocess_script.py +4 -0
  4. griptape_nodes/bootstrap/workflow_publishers/utils/subprocess_script.py +4 -0
  5. griptape_nodes/common/node_executor.py +1 -1
  6. griptape_nodes/drivers/image_metadata/__init__.py +21 -0
  7. griptape_nodes/drivers/image_metadata/base_image_metadata_driver.py +63 -0
  8. griptape_nodes/drivers/image_metadata/exif_metadata_driver.py +218 -0
  9. griptape_nodes/drivers/image_metadata/image_metadata_driver_registry.py +55 -0
  10. griptape_nodes/drivers/image_metadata/png_metadata_driver.py +71 -0
  11. griptape_nodes/drivers/storage/base_storage_driver.py +32 -0
  12. griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py +384 -10
  13. griptape_nodes/drivers/storage/local_storage_driver.py +65 -4
  14. griptape_nodes/drivers/thread_storage/local_thread_storage_driver.py +1 -0
  15. griptape_nodes/exe_types/node_groups/base_node_group.py +3 -0
  16. griptape_nodes/exe_types/node_types.py +13 -0
  17. griptape_nodes/exe_types/param_components/log_parameter.py +3 -2
  18. griptape_nodes/exe_types/param_types/parameter_float.py +4 -4
  19. griptape_nodes/exe_types/param_types/parameter_int.py +4 -4
  20. griptape_nodes/exe_types/param_types/parameter_number.py +34 -30
  21. griptape_nodes/node_library/workflow_registry.py +5 -8
  22. griptape_nodes/retained_mode/events/app_events.py +1 -0
  23. griptape_nodes/retained_mode/events/base_events.py +42 -26
  24. griptape_nodes/retained_mode/events/flow_events.py +67 -0
  25. griptape_nodes/retained_mode/events/library_events.py +1 -1
  26. griptape_nodes/retained_mode/events/node_events.py +1 -0
  27. griptape_nodes/retained_mode/events/os_events.py +22 -0
  28. griptape_nodes/retained_mode/events/static_file_events.py +28 -4
  29. griptape_nodes/retained_mode/managers/flow_manager.py +134 -0
  30. griptape_nodes/retained_mode/managers/image_metadata_injector.py +339 -0
  31. griptape_nodes/retained_mode/managers/library_manager.py +71 -41
  32. griptape_nodes/retained_mode/managers/model_manager.py +1 -0
  33. griptape_nodes/retained_mode/managers/node_manager.py +8 -5
  34. griptape_nodes/retained_mode/managers/os_manager.py +269 -32
  35. griptape_nodes/retained_mode/managers/project_manager.py +3 -7
  36. griptape_nodes/retained_mode/managers/session_manager.py +1 -0
  37. griptape_nodes/retained_mode/managers/settings.py +5 -0
  38. griptape_nodes/retained_mode/managers/static_files_manager.py +83 -17
  39. griptape_nodes/retained_mode/managers/workflow_manager.py +71 -41
  40. griptape_nodes/servers/static.py +34 -0
  41. griptape_nodes/traits/clamp.py +52 -9
  42. griptape_nodes/utils/__init__.py +9 -1
  43. griptape_nodes/utils/file_utils.py +13 -13
  44. griptape_nodes/utils/http_file_patch.py +613 -0
  45. griptape_nodes/utils/path_utils.py +58 -0
  46. griptape_nodes/utils/url_utils.py +106 -0
  47. {griptape_nodes-0.71.0.dist-info → griptape_nodes-0.72.1.dist-info}/METADATA +2 -1
  48. {griptape_nodes-0.71.0.dist-info → griptape_nodes-0.72.1.dist-info}/RECORD +50 -41
  49. {griptape_nodes-0.71.0.dist-info → griptape_nodes-0.72.1.dist-info}/WHEEL +1 -1
  50. {griptape_nodes-0.71.0.dist-info → griptape_nodes-0.72.1.dist-info}/entry_points.txt +0 -0
griptape_nodes/retained_mode/managers/library_manager.py

@@ -193,6 +193,7 @@ from griptape_nodes.utils.library_utils import (
     filter_old_xdg_library_paths,
     is_monorepo,
 )
+from griptape_nodes.utils.path_utils import resolve_workspace_path
 from griptape_nodes.utils.uv_utils import find_uv_bin
 from griptape_nodes.utils.version_utils import get_complete_version_string
 
@@ -2428,7 +2429,18 @@ class LibraryManager:
         # Collect results
         return dict(task.result() for task in tasks)
 
-    async def on_app_initialization_complete(self, _payload: AppInitializationComplete) -> None:
+    async def on_app_initialization_complete(self, payload: AppInitializationComplete) -> None:
+        if payload.skip_library_loading:
+            # Register all secrets even in headless mode
+            GriptapeNodes.SecretsManager().register_all_secrets()
+
+            # Still need to tell WorkflowManager to register workflows
+            # Pass the specific workflows if provided, otherwise it will scan workspace
+            GriptapeNodes.WorkflowManager().on_libraries_initialization_complete(
+                workflows_to_register=payload.workflows_to_register
+            )
+            return
+
         # Automatically migrate old XDG library paths from config
         # TODO: Remove https://github.com/griptape-ai/griptape-nodes/issues/3348
         self._migrate_old_xdg_library_paths()
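The early return above reads two fields off AppInitializationComplete: skip_library_loading and workflows_to_register. A minimal sketch of the branching, using a hypothetical stand-in dataclass rather than the real event class (which this diff does not reproduce):

    from dataclasses import dataclass

    # Hypothetical stand-in mirroring only the fields the new branch reads.
    @dataclass
    class InitPayloadStub:
        skip_library_loading: bool = False
        workflows_to_register: list[str] | None = None

    def describe_init(payload: InitPayloadStub) -> str:
        if payload.skip_library_loading:
            # Headless path: secrets are still registered and workflows go straight
            # to the WorkflowManager; library migration and discovery are skipped.
            return f"headless init; workflows={payload.workflows_to_register}"
        return "full init: migrate XDG paths, then discover and load libraries"

    print(describe_init(InitPayloadStub(skip_library_loading=True, workflows_to_register=["flow.py"])))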
@@ -2530,9 +2542,7 @@ class LibraryManager:
             return None
 
         # Resolve relative path to absolute path
-        advanced_library_module_path = Path(library_data.advanced_library_path)
-        if not advanced_library_module_path.is_absolute():
-            advanced_library_module_path = base_dir / advanced_library_module_path
+        advanced_library_module_path = resolve_workspace_path(Path(library_data.advanced_library_path), base_dir)
 
         # Load the module (supports hot reloading)
         try:
@@ -2624,9 +2634,7 @@ class LibraryManager:
         # Process each node in the metadata
         for node_definition in library_data.nodes:
             # Resolve relative path to absolute path
-            node_file_path = Path(node_definition.file_path)
-            if not node_file_path.is_absolute():
-                node_file_path = base_dir / node_file_path
+            node_file_path = resolve_workspace_path(Path(node_definition.file_path), base_dir)
 
             try:
                 # Dynamically load the module containing the node class
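Both call sites collapse the same inline pattern (wrap in Path, check is_absolute, join onto base_dir) into resolve_workspace_path from the new griptape_nodes/utils/path_utils.py, which is not shown in this diff. The sketch below is a guess at the helper's core behavior, reconstructed from the code it replaces; the real helper may do more (for example, resolving against the configured workspace):

    from pathlib import Path

    def resolve_workspace_path_sketch(path: Path, base_dir: Path) -> Path:
        # Hypothetical reconstruction: absolute paths pass through untouched,
        # relative paths are resolved against base_dir (the library's directory).
        if path.is_absolute():
            return path
        return base_dir / path

    base = Path("/libraries/my_library")
    print(resolve_workspace_path_sketch(Path("nodes/image.py"), base))   # /libraries/my_library/nodes/image.py
    print(resolve_workspace_path_sketch(Path("/abs/other.py"), base))    # /abs/other.py (unchanged)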
@@ -2987,6 +2995,37 @@ class LibraryManager:
         )
         return ReloadAllLibrariesResultSuccess(result_details=ResultDetails(message=details, level=logging.INFO))
 
+    def _create_library_info_entry(self, file_path_str: str, *, is_sandbox: bool) -> None:
+        """Create a LibraryInfo entry for a discovered library.
+
+        Loads metadata if possible and creates the entry in the appropriate lifecycle state.
+        Only creates the entry if it doesn't already exist in tracking.
+        """
+        if file_path_str in self._library_file_path_to_info:
+            return
+
+        metadata_result = self.load_library_metadata_from_file_request(
+            LoadLibraryMetadataFromFileRequest(file_path=file_path_str)
+        )
+
+        library_name = None
+        library_version = None
+        lifecycle_state = LibraryManager.LibraryLifecycleState.DISCOVERED
+
+        if isinstance(metadata_result, LoadLibraryMetadataFromFileResultSuccess):
+            library_name = metadata_result.library_schema.name
+            library_version = metadata_result.library_schema.metadata.library_version
+            lifecycle_state = LibraryManager.LibraryLifecycleState.METADATA_LOADED
+
+        self._library_file_path_to_info[file_path_str] = LibraryManager.LibraryInfo(
+            lifecycle_state=lifecycle_state,
+            fitness=LibraryManager.LibraryFitness.NOT_EVALUATED,
+            library_path=file_path_str,
+            is_sandbox=is_sandbox,
+            library_name=library_name,
+            library_version=library_version,
+        )
+
     def discover_libraries_request(
         self,
         request: DiscoverLibrariesRequest,
@@ -2997,14 +3036,15 @@ class LibraryManager:
         Scans configured library paths and creates LibraryInfo entries in DISCOVERED state.
         """
         try:
-            config_library_paths = set(self._discover_library_files())
+            config_library_paths = self._discover_library_files()
         except Exception as e:
             logger.exception("Failed to discover library files")
             return DiscoverLibrariesResultFailure(
                 result_details=f"Failed to discover library files: {e}",
            )
 
-        discovered_libraries = set()
+        discovered_libraries = []
+        seen_libraries = set()
 
         # Process sandbox library first if requested
         if request.include_sandbox:
@@ -3033,39 +3073,24 @@ class LibraryManager:
                 # Continue anyway if write failed - lifecycle will fail gracefully
 
             # Add to discovered libraries with is_sandbox=True
-            discovered_libraries.add(DiscoveredLibrary(path=sandbox_json_path, is_sandbox=True))
-
-            # Create minimal LibraryInfo entry in discovered state if not already tracked
-            if sandbox_json_path_str not in self._library_file_path_to_info:
-                self._library_file_path_to_info[sandbox_json_path_str] = LibraryManager.LibraryInfo(
-                    lifecycle_state=LibraryManager.LibraryLifecycleState.DISCOVERED,
-                    fitness=LibraryManager.LibraryFitness.NOT_EVALUATED,
-                    library_path=sandbox_json_path_str,
-                    is_sandbox=True,
-                    library_name=None,
-                    library_version=None,
-                )
+            if sandbox_json_path not in seen_libraries:
+                seen_libraries.add(sandbox_json_path)
+                discovered_libraries.append(DiscoveredLibrary(path=sandbox_json_path, is_sandbox=True))
+
+            # Create LibraryInfo entry for the sandbox library
+            self._create_library_info_entry(sandbox_json_path_str, is_sandbox=True)
 
         # Add all regular libraries from config
         for file_path in config_library_paths:
             file_path_str = str(file_path)
 
             # Add to discovered libraries with is_sandbox=False
-            discovered_libraries.add(DiscoveredLibrary(path=file_path, is_sandbox=False))
+            if file_path not in seen_libraries:
+                seen_libraries.add(file_path)
+                discovered_libraries.append(DiscoveredLibrary(path=file_path, is_sandbox=False))
 
-            # Skip if already tracked
-            if file_path_str in self._library_file_path_to_info:
-                continue
-
-            # Create minimal LibraryInfo entry in discovered state
-            self._library_file_path_to_info[file_path_str] = LibraryManager.LibraryInfo(
-                lifecycle_state=LibraryManager.LibraryLifecycleState.DISCOVERED,
-                fitness=LibraryManager.LibraryFitness.NOT_EVALUATED,
-                library_path=file_path_str,
-                is_sandbox=False,
-                library_name=None,
-                library_version=None,
-            )
+            # Create LibraryInfo entry for the library
+            self._create_library_info_entry(file_path_str, is_sandbox=False)
 
         # Success path at the end
         return DiscoverLibrariesResultSuccess(
@@ -3230,20 +3255,25 @@ class LibraryManager:
         """Discover library JSON files from config and workspace recursively.
 
         Returns:
-            List of library file paths found
+            List of library file paths found, in the order they appear in config
         """
         config_mgr = GriptapeNodes.ConfigManager()
         user_libraries_section = LIBRARIES_TO_REGISTER_KEY
 
-        discovered_libraries = set()
+        discovered_libraries = []
+        seen_libraries = set()
 
         def process_path(path: Path) -> None:
             """Process a path, handling both files and directories."""
             if path.is_dir():
                 # Recursively find library files, skipping hidden directories
-                discovered_libraries.update(find_files_recursive(path, LibraryManager.LIBRARY_CONFIG_GLOB_PATTERN))
-            elif path.suffix == ".json":
-                discovered_libraries.add(path)
+                for lib_path in find_files_recursive(path, LibraryManager.LIBRARY_CONFIG_GLOB_PATTERN):
+                    if lib_path not in seen_libraries:
+                        seen_libraries.add(lib_path)
+                        discovered_libraries.append(lib_path)
+            elif path.suffix == ".json" and path not in seen_libraries:
+                seen_libraries.add(path)
+                discovered_libraries.append(path)
 
         # Add from config
         config_libraries = config_mgr.get_config_value(user_libraries_section, default=[])
@@ -3254,7 +3284,7 @@ class LibraryManager:
             if library_path.exists():
                 process_path(library_path)
 
-        return list(discovered_libraries)
+        return discovered_libraries
 
     async def check_library_update_request(self, request: CheckLibraryUpdateRequest) -> ResultPayload:  # noqa: C901, PLR0911, PLR0912, PLR0915
         """Check if a library has updates available via git."""
griptape_nodes/retained_mode/managers/model_manager.py

@@ -1119,6 +1119,7 @@ class ModelManager:
             msg = f"Download status file not found for model '{model_id}'"
             raise FileNotFoundError(msg)
 
+        # TODO: Replace with DeleteFileRequest https://github.com/griptape-ai/griptape-nodes/issues/3765
         status_file.unlink()
         return str(status_file)
 

griptape_nodes/retained_mode/managers/node_manager.py

@@ -402,9 +402,7 @@ class NodeManager:
            )
         # modifying to exception to try to catch all possible issues with node creation.
         except Exception as err:
-            import traceback
-
-            traceback.print_exc()
+            logger.error(err)
            details = f"Could not create Node '{final_node_name}' of type '{request.node_type}': {err}"
 
            # Check if we should create an Error Proxy node instead of failing
@@ -3291,6 +3289,7 @@ class NodeManager:
         create_node_request: CreateNodeRequest,
         *,
         use_pickling: bool = False,
+        serialize_all_parameter_values: bool = False,
     ) -> list[SerializedNodeCommands.IndirectSetParameterValueCommand] | None:
         """Generates code to save a parameter value for a node in a Griptape workflow.
 
@@ -3309,6 +3308,7 @@ class NodeManager:
             serialized_parameter_value_tracker (SerializedParameterValueTracker): Object mapping maintaining value hashes to unique value UUIDs, and non-serializable values
             create_node_request (CreateNodeRequest): The node creation request that will be modified if serialization fails
             use_pickling (bool): If True, use pickle-based serialization; if False, use deep copy
+            serialize_all_parameter_values (bool): If True, save all parameter values regardless of whether they were explicitly set or match defaults
 
         Returns:
             None (if no value to be serialized) or an IndirectSetParameterValueCommand linking the value to the unique value map
@@ -3329,8 +3329,11 @@ class NodeManager:
         # Save the value if it was explicitly set OR if it equals the default value.
         # The latter ensures the default is preserved when loading workflows,
         # even if the code's default value changes later.
-        if parameter.name in node.parameter_values or (
-            parameter.default_value is not None and effective_value == parameter.default_value
+        # If serialize_all_parameter_values is True, save all parameter values regardless.
+        if (
+            serialize_all_parameter_values
+            or parameter.name in node.parameter_values
+            or (parameter.default_value is not None and effective_value == parameter.default_value)
         ):
            internal_value = effective_value
            # We have a value. Attempt to get a hash for it to see if it matches one
griptape_nodes/retained_mode/managers/os_manager.py

@@ -1,4 +1,6 @@
+import asyncio
 import base64
+import ctypes
 import logging
 import mimetypes
 import os
@@ -7,12 +9,14 @@ import shutil
 import stat
 import subprocess
 import sys
+from ctypes import wintypes
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any, NamedTuple
+from typing import Any, ClassVar, NamedTuple
 
 import aioshutil
 import portalocker
+import send2trash
 from binaryornot.check import is_binary
 from rich.console import Console
 
@@ -35,6 +39,8 @@ from griptape_nodes.retained_mode.events.os_events import (
     DeleteFileRequest,
     DeleteFileResultFailure,
     DeleteFileResultSuccess,
+    DeletionBehavior,
+    DeletionOutcome,
     ExistingFilePolicy,
     FileIOFailureReason,
     FileSystemEntry,
@@ -139,6 +145,14 @@ class FilenameParts(NamedTuple):
     extension: str
 
 
+class WindowsSpecialFolderError(OSError):
+    """Raised when Windows Shell API (SHGetFolderPathW) fails for a special folder.
+
+    Callers (e.g. try_resolve_windows_special_folder) catch this to fall back
+    to expanduser or other resolution.
+    """
+
+
 class FilePathValidationError(Exception):
     """Raised when file path validation fails before write operation.
 
@@ -170,6 +184,19 @@ class CopyTreeStats:
     total_bytes_copied: int
 
 
+class WindowsSpecialFolderResult(NamedTuple):
+    """Result of resolving a Windows special folder from path parts.
+
+    Invariant: either both fields are None (not resolved), or both are set
+    (resolved). When resolved, special_path is the folder Path and
+    remaining_parts is the list of path components after the folder (may be
+    empty). We never return (None, list) or (Path, None).
+    """
+
+    special_path: Path | None
+    remaining_parts: list[str] | None
+
+
 class OSManager:
     """A class to manage OS-level scenarios.
 
@@ -177,6 +204,80 @@ class OSManager:
     This lays the groundwork to exclude specific functionality on a configuration basis.
     """
 
+    # Windows CSIDL constants for special folders (used by _expand_path)
+    # https://learn.microsoft.com/en-us/windows/win32/shell/csidl
+    WINDOWS_CSIDL_MAP: ClassVar[dict[str, int]] = {
+        "desktop": 0x0000,  # CSIDL_DESKTOP
+        "documents": 0x0005,  # CSIDL_PERSONAL (My Documents)
+        "downloads": 0x0033,  # CSIDL_DOWNLOADS
+        "pictures": 0x0027,  # CSIDL_MYPICTURES
+        "videos": 0x000E,  # CSIDL_MYVIDEO
+        "music": 0x000D,  # CSIDL_MYMUSIC
+    }
+
+    @staticmethod
+    def normalize_path_parts_for_special_folder(path_str: str) -> list[str]:
+        r"""Parse a path string into normalized parts for special folder detection.
+
+        Strips leading ~ or ~/, or %UserProfile% / %USERPROFILE% (case-insensitive);
+        expands env vars when %UserProfile% is present; returns lowercased path
+        parts. Used to detect Windows special folder names (e.g. ~/Downloads,
+        %UserProfile%/Desktop). Also strips Windows long path prefix (\\?\ or
+        \\?\UNC\) so prefixed paths parse correctly instead of producing "?"
+        as the first part.
+
+        Args:
+            path_str: Path string that may contain ~ or %UserProfile% (case-insensitive).
+
+        Returns:
+            List of lowercased path parts, e.g. ["downloads"] for "~/Downloads".
+        """
+        normalized = path_str.replace("\\", "/")
+        # Strip Windows long path prefix so we don't get "?" as first part
+        if normalized.upper().startswith("//?/UNC/"):
+            normalized = "//" + normalized[8:]  # Keep UNC as //server/share
+        elif normalized.startswith("//?/"):
+            normalized = normalized[4:]
+        if normalized.startswith("~/"):
+            normalized = normalized[2:]
+        elif normalized.startswith("~"):
+            normalized = normalized[1:]
+        if "%USERPROFILE%" in normalized.upper():
+            normalized = os.path.expandvars(normalized)
+            normalized = normalized.replace("\\", "/")  # expandvars can return backslashes on Windows
+            userprofile = os.environ.get("USERPROFILE", "")
+            if userprofile and normalized.lower().startswith(userprofile.lower().replace("\\", "/")):
+                normalized = normalized[len(userprofile) :].lstrip("/\\")
+        parts = [p.lower() for p in normalized.split("/") if p]
+        return parts
+
+    def try_resolve_windows_special_folder(self, parts: list[str]) -> WindowsSpecialFolderResult | None:
+        """Resolve Windows special folder from path parts.
+
+        If the first part matches a known special folder name (e.g. "desktop",
+        "downloads"), calls _get_windows_special_folder_path and returns a
+        result with special_path and remaining_parts. Returns None if parts are
+        empty, the first part is unknown, or the Shell API raises
+        WindowsSpecialFolderError (caller catches and falls back).
+
+        Args:
+            parts: Lowercased path parts from normalize_path_parts_for_special_folder.
+
+        Returns:
+            WindowsSpecialFolderResult when resolved (special_path and remaining_parts),
+            or None when no special folder could be resolved.
+        """
+        if not parts or parts[0] not in OSManager.WINDOWS_CSIDL_MAP:
+            return None
+        csidl = OSManager.WINDOWS_CSIDL_MAP[parts[0]]
+        try:
+            special_path = self._get_windows_special_folder_path(csidl)
+        except WindowsSpecialFolderError:
+            # No warning: Shell API failure is an expected fallback path; not useful to users.
+            return None
+        remaining = parts[1:] if len(parts) > 1 else []
+        return WindowsSpecialFolderResult(special_path=special_path, remaining_parts=remaining)
+
     def __init__(self, event_manager: EventManager | None = None):
         if event_manager is not None:
             event_manager.assign_manager_to_request_type(
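To make the normalization concrete, a few traced inputs for the static helper above; the return values are inferred by hand from the code, and the %UserProfile% example assumes a Windows session with USERPROFILE=C:\Users\alice, so treat it as illustrative:

    from griptape_nodes.retained_mode.managers.os_manager import OSManager

    OSManager.normalize_path_parts_for_special_folder("~/Downloads/img.png")
    # -> ["downloads", "img.png"]  (first part matches WINDOWS_CSIDL_MAP["downloads"])

    OSManager.normalize_path_parts_for_special_folder("%UserProfile%/Desktop")
    # -> ["desktop"]  (env var expanded, user-profile prefix stripped; Windows only)

    OSManager.normalize_path_parts_for_special_folder(r"\\?\C:\projects\out.png")
    # -> ["c:", "projects", "out.png"]  (long-path prefix stripped; no special folder match)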
@@ -230,18 +331,89 @@ class OSManager:
         """Get the workspace path from config."""
         return GriptapeNodes.ConfigManager().workspace_path
 
+    def _get_windows_special_folder_path(self, csidl: int) -> Path:
+        """Get Windows special folder path using Shell API.
+
+        Source: https://stackoverflow.com/a/30924555
+        Uses SHGetFolderPathW to get the actual location of special folders,
+        handling OneDrive redirections and other Windows folder redirections.
+        Callers (e.g. try_resolve_windows_special_folder) should catch
+        WindowsSpecialFolderError and fall back to expanduser.
+
+        Args:
+            csidl: CSIDL constant for the special folder (e.g., CSIDL_DESKTOP)
+
+        Returns:
+            Path to the special folder.
+
+        Raises:
+            RuntimeError: If not on Windows (programming error).
+            WindowsSpecialFolderError: If the Shell API fails (HRESULT or ctypes exception).
+        """
+        if not self.is_windows():
+            msg = "_get_windows_special_folder_path may only be called on Windows"
+            raise RuntimeError(msg)
+
+        # Argtypes for SHGetFolderPathW (Windows Shell API)
+        # https://learn.microsoft.com/en-us/windows/win32/shell/csidl
+        sh_get_folder_path_argtypes = (
+            wintypes.HWND,
+            ctypes.c_int,
+            wintypes.HANDLE,
+            wintypes.DWORD,
+            wintypes.LPCWSTR,
+        )
+
+        def _call_shell_api() -> Path:
+            # windll is Windows-only; code path is guarded by is_windows()
+            sh_get_folder_path = ctypes.windll.shell32.SHGetFolderPathW  # pyright: ignore[reportAttributeAccessIssue]
+            sh_get_folder_path.argtypes = sh_get_folder_path_argtypes
+
+            path_buf = ctypes.create_unicode_buffer(wintypes.MAX_PATH)
+            result = sh_get_folder_path(0, csidl, 0, 0, path_buf)
+            if result != 0:  # S_OK is 0; non-zero is an HRESULT error code
+                msg = f"Windows Shell API SHGetFolderPathW failed for CSIDL {csidl}: HRESULT {result}"
+                raise WindowsSpecialFolderError(msg)
+            return Path(path_buf.value)
+
+        try:
+            return _call_shell_api()
+        except WindowsSpecialFolderError:
+            raise
+        except Exception as e:  # Broad catch: ctypes/Shell API can raise many types
+            msg = f"Windows Shell API SHGetFolderPathW failed for CSIDL {csidl}: {e}"
+            raise WindowsSpecialFolderError(msg) from e
+
     def _expand_path(self, path_str: str) -> Path:
-        """Expand a path string, handling tilde and environment variables.
+        """Expand a path string, handling tilde, environment variables, and special folders.
+
+        Handles Windows special folders (like Desktop) that may be redirected to OneDrive
+        by using Windows Shell API (SHGetFolderPathW) to get the actual system paths.
 
         Args:
-            path_str: Path string that may contain ~ or environment variables
+            path_str: Path string that may contain ~, environment variables, or special folder names
 
         Returns:
             Expanded Path object
         """
-        # Expand environment variables first, then tilde
-        expanded_vars = os.path.expandvars(path_str)
-        return self.resolve_path_safely(Path(expanded_vars).expanduser())
+        resolved = None
+        if self.is_windows():
+            parts = self.normalize_path_parts_for_special_folder(path_str)
+            resolved = self.try_resolve_windows_special_folder(parts)
+
+        # Success path at the end - compute final path and return
+        if resolved is not None and resolved.special_path is not None:
+            extra_parts: list[str] = resolved.remaining_parts if resolved.remaining_parts else []
+            if extra_parts:
+                final_path = resolved.special_path / Path(*extra_parts)
+            else:
+                final_path = resolved.special_path
+        else:
+            expanded_vars = os.path.expandvars(path_str)
+            expanded_user = os.path.expanduser(expanded_vars)  # noqa: PTH111
+            final_path = Path(expanded_user)
+
+        return self.resolve_path_safely(final_path)
 
     def resolve_path_safely(self, path: Path) -> Path:
         """Resolve a path consistently across platforms.
@@ -285,6 +457,13 @@ class OSManager:
         # This works consistently even for non-existent paths on Windows
         return Path(os.path.normpath(path))
 
+    def _path_needs_expansion(self, path_str: str) -> bool:
+        """Return True if path contains env vars, is absolute, or starts with ~ (needs _expand_path)."""
+        has_env_vars = "%" in path_str or "$" in path_str
+        is_absolute = Path(path_str).is_absolute()
+        starts_with_tilde = path_str.startswith("~")
+        return has_env_vars or is_absolute or starts_with_tilde
+
     def _resolve_file_path(self, path_str: str, *, workspace_only: bool = False) -> Path:
         """Resolve a file path, handling absolute, relative, and tilde paths.
 
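The new _path_needs_expansion guard replaces the is_absolute/tilde check at both call sites in the following hunks, and additionally treats % or $ as a signal that environment variables need expanding. The same logic, reproduced standalone for illustration:

    from pathlib import Path

    def path_needs_expansion(path_str: str) -> bool:
        # Mirrors the helper above: env vars, absolute paths, and ~ all route to _expand_path.
        return "%" in path_str or "$" in path_str or Path(path_str).is_absolute() or path_str.startswith("~")

    for candidate in ("%UserProfile%/Desktop", "$HOME/renders", "~/Downloads", "renders/out.png"):
        print(candidate, "->", path_needs_expansion(candidate))
    # Only the plain relative path prints False; it gets joined onto the workspace instead.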
@@ -296,10 +475,8 @@ class OSManager:
             Resolved Path object
         """
         try:
-            if Path(path_str).is_absolute() or path_str.startswith("~"):
-                # Expand tilde and environment variables for absolute paths or paths starting with ~
+            if self._path_needs_expansion(path_str):
                 return self._expand_path(path_str)
-            # Both workspace and system-wide modes resolve relative to current directory
             return self.resolve_path_safely(self._get_workspace_path() / path_str)
         except (ValueError, RuntimeError):
             if workspace_only:
@@ -1199,12 +1376,9 @@ class OSManager:
         # Get the directory path to list
         if request.directory_path is None:
             directory = self._get_workspace_path()
-        # Handle paths consistently - always resolve relative paths relative to current directory
-        elif Path(request.directory_path).is_absolute() or request.directory_path.startswith("~"):
-            # Expand tilde and environment variables for absolute paths or paths starting with ~
+        elif self._path_needs_expansion(request.directory_path):
             directory = self._expand_path(request.directory_path)
         else:
-            # Both workspace and system-wide modes resolve relative to current directory
             directory = self.resolve_path_safely(self._get_workspace_path() / request.directory_path)
 
         # Check if directory exists
@@ -2036,6 +2210,8 @@ class OSManager:
             flags=portalocker.LockFlags.EXCLUSIVE | portalocker.LockFlags.NON_BLOCKING,
         ) as fh:
             fh.write(content)
+            fh.flush()
+            os.fsync(fh.fileno())
 
         # Calculate bytes written
         if isinstance(content, bytes):
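The added flush/fsync pair closes the gap between Python's buffered write and the bytes reaching stable storage: flush() drains the user-space buffer into the OS, and os.fsync() asks the kernel to commit the file to disk before the lock is released. The same pattern outside portalocker, for reference:

    import os

    def write_durably(path: str, content: bytes) -> None:
        with open(path, "wb") as fh:
            fh.write(content)
            fh.flush()              # push Python's buffer to the OS
            os.fsync(fh.fileno())   # ask the OS to commit to the device

    write_durably("example.bin", b"workflow state")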
@@ -2251,6 +2427,7 @@ class OSManager:
         for file_path, _ in files_with_times:
             try:
                 # Delete the file.
+                # TODO: Replace with DeleteFileRequest https://github.com/griptape-ai/griptape-nodes/issues/3765
                 file_path.unlink()
                 removed_count += 1
 
@@ -2538,10 +2715,10 @@ class OSManager:
             console.print(f"[red]Details: {e}[/red]")
             raise
 
-    async def on_delete_file_request(self, request: DeleteFileRequest) -> ResultPayload:  # noqa: PLR0911, PLR0912, C901
+    async def on_delete_file_request(  # noqa: PLR0911, PLR0912, PLR0915, C901
+        self, request: DeleteFileRequest
+    ) -> DeleteFileResultSuccess | DeleteFileResultFailure:
         """Handle a request to delete a file or directory."""
-        # FAILURE CASES FIRST (per CLAUDE.md)
-
         # Validate exactly one of path or file_entry provided and determine path to delete
         if request.path is not None and request.file_entry is not None:
             msg = "Attempted to delete file with both path and file_entry. Failed due to invalid parameters"
@@ -2578,28 +2755,88 @@ class OSManager:
         else:
             deleted_paths = [str(resolved_path)]
 
-        # Perform deletion
-        try:
-            if is_directory:
-                await aioshutil.rmtree(resolved_path, onexc=OSManager.remove_readonly)
-            else:
-                resolved_path.unlink()
-        except PermissionError as e:
-            msg = f"Attempted to delete {'directory' if is_directory else 'file'} at path {path_to_delete}. Failed due to permission denied: {e}"
-            return DeleteFileResultFailure(failure_reason=FileIOFailureReason.PERMISSION_DENIED, result_details=msg)
-        except OSError as e:
-            msg = f"Attempted to delete {'directory' if is_directory else 'file'} at path {path_to_delete}. Failed due to I/O error: {e}"
-            return DeleteFileResultFailure(failure_reason=FileIOFailureReason.IO_ERROR, result_details=msg)
-        except Exception as e:
-            msg = f"Attempted to delete {'directory' if is_directory else 'file'} at path {path_to_delete}. Failed due to unexpected error: {type(e).__name__}: {e}"
-            return DeleteFileResultFailure(failure_reason=FileIOFailureReason.UNKNOWN, result_details=msg)
+        # Helper function for permanent deletion
+        async def attempt_permanent_delete() -> DeleteFileResultFailure | None:
+            """Permanently delete the file/directory. Returns failure result or None on success."""
+            try:
+                if is_directory:
+                    await aioshutil.rmtree(resolved_path, onexc=OSManager.remove_readonly)
+                else:
+                    resolved_path.unlink()
+            except PermissionError as e:
+                msg = f"Attempted to delete {'directory' if is_directory else 'file'} at path {path_to_delete}. Failed due to permission denied: {e}"
+                return DeleteFileResultFailure(failure_reason=FileIOFailureReason.PERMISSION_DENIED, result_details=msg)
+            except OSError as e:
+                msg = f"Attempted to delete {'directory' if is_directory else 'file'} at path {path_to_delete}. Failed due to I/O error: {e}"
+                return DeleteFileResultFailure(failure_reason=FileIOFailureReason.IO_ERROR, result_details=msg)
+            except Exception as e:
+                msg = f"Attempted to delete {'directory' if is_directory else 'file'} at path {path_to_delete}. Failed due to unexpected error: {type(e).__name__}: {e}"
+                return DeleteFileResultFailure(failure_reason=FileIOFailureReason.UNKNOWN, result_details=msg)
+            return None
+
+        # Helper function for recycle bin deletion
+        async def attempt_recycle_bin_delete() -> DeleteFileResultFailure | None:
+            """Send to recycle bin. Returns failure result or None on success."""
+            try:
+                await asyncio.to_thread(send2trash.send2trash, str(resolved_path))
+            except send2trash.TrashPermissionError as e:
+                msg = f"Attempted to send {'directory' if is_directory else 'file'} at path {path_to_delete} to the recycle bin. Failed due to recycle bin unavailable: {e}"
+                return DeleteFileResultFailure(
+                    failure_reason=FileIOFailureReason.RECYCLE_BIN_UNAVAILABLE, result_details=msg
+                )
+            except OSError as e:
+                msg = f"Attempted to send {'directory' if is_directory else 'file'} at path {path_to_delete} to the recycle bin. Failed due to I/O error: {e}"
+                return DeleteFileResultFailure(failure_reason=FileIOFailureReason.IO_ERROR, result_details=msg)
+            except Exception as e:
+                msg = f"Attempted to send {'directory' if is_directory else 'file'} at path {path_to_delete} to the recycle bin. Failed due to unexpected error: {type(e).__name__}: {e}"
+                return DeleteFileResultFailure(failure_reason=FileIOFailureReason.UNKNOWN, result_details=msg)
+            return None
+
+        # Perform deletion based on requested behavior
+        match request.deletion_behavior:
+            case DeletionBehavior.PERMANENTLY_DELETE:
+                failure = await attempt_permanent_delete()
+                if failure:
+                    return failure
+                outcome = DeletionOutcome.PERMANENTLY_DELETED
+                result_details = (
+                    f"Successfully deleted {'directory' if is_directory else 'file'} at path {path_to_delete}"
+                )
+
+            case DeletionBehavior.RECYCLE_BIN_ONLY:
+                failure = await attempt_recycle_bin_delete()
+                if failure:
+                    return failure
+                outcome = DeletionOutcome.SENT_TO_RECYCLE_BIN
+                result_details = f"Successfully sent {'directory' if is_directory else 'file'} at path {path_to_delete} to the recycle bin"
+
+            case DeletionBehavior.PREFER_RECYCLE_BIN:
+                failure = await attempt_recycle_bin_delete()
+                if failure:
+                    # Fall back to permanent deletion
+                    failure = await attempt_permanent_delete()
+                    if failure:
+                        return failure
+                    outcome = DeletionOutcome.PERMANENTLY_DELETED
+                    result_details = ResultDetails(
+                        message=f"Attempted to send {'directory' if is_directory else 'file'} at path {path_to_delete} to the recycle bin, but this failed; fell back to permanent deletion, which succeeded.",
+                        level=logging.WARNING,
+                    )
+                else:
+                    outcome = DeletionOutcome.SENT_TO_RECYCLE_BIN
+                    result_details = f"Successfully sent {'directory' if is_directory else 'file'} at path {path_to_delete} to the recycle bin"
+
+            case _:
+                msg = f"Unknown/unsupported deletion behavior: {request.deletion_behavior}"
+                raise ValueError(msg)
 
         # SUCCESS PATH AT END
         return DeleteFileResultSuccess(
             deleted_path=str(resolved_path),
             was_directory=is_directory,
             deleted_paths=deleted_paths,
-            result_details=f"Successfully deleted {'directory' if is_directory else 'file'} at path {path_to_delete}",
+            outcome=outcome,
+            result_details=result_details,
         )
 
     def on_get_file_info_request(  # noqa: PLR0911
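DeletionBehavior and DeletionOutcome are defined in os_events.py (+22 lines, not shown here); the members referenced above are PERMANENTLY_DELETE, RECYCLE_BIN_ONLY, and PREFER_RECYCLE_BIN, with outcomes PERMANENTLY_DELETED and SENT_TO_RECYCLE_BIN, plus the new FileIOFailureReason.RECYCLE_BIN_UNAVAILABLE. A sketch of how a caller might exercise the new behavior; keyword construction of DeleteFileRequest is assumed, and dispatch normally goes through the retained-mode event system rather than the manager directly:

    from griptape_nodes.retained_mode.events.os_events import (
        DeleteFileRequest,
        DeleteFileResultSuccess,
        DeletionBehavior,
        DeletionOutcome,
    )

    # Prefer the recycle bin, falling back to permanent deletion if the bin is
    # unavailable (the PREFER_RECYCLE_BIN case in the match statement above).
    request = DeleteFileRequest(
        path="renders/old_output.png",
        deletion_behavior=DeletionBehavior.PREFER_RECYCLE_BIN,
    )

    # Inside an async context, handing the request to the OSManager handler:
    # result = await os_manager.on_delete_file_request(request)
    # if isinstance(result, DeleteFileResultSuccess):
    #     recoverable = result.outcome == DeletionOutcome.SENT_TO_RECYCLE_BIN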