griptape-nodes 0.53.0__py3-none-any.whl → 0.54.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. griptape_nodes/__init__.py +5 -2
  2. griptape_nodes/app/app.py +4 -26
  3. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +35 -5
  4. griptape_nodes/bootstrap/workflow_executors/workflow_executor.py +15 -1
  5. griptape_nodes/cli/commands/config.py +4 -1
  6. griptape_nodes/cli/commands/init.py +5 -3
  7. griptape_nodes/cli/commands/libraries.py +14 -8
  8. griptape_nodes/cli/commands/models.py +504 -0
  9. griptape_nodes/cli/commands/self.py +5 -2
  10. griptape_nodes/cli/main.py +11 -1
  11. griptape_nodes/cli/shared.py +0 -9
  12. griptape_nodes/common/directed_graph.py +17 -1
  13. griptape_nodes/drivers/storage/base_storage_driver.py +40 -20
  14. griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py +24 -29
  15. griptape_nodes/drivers/storage/local_storage_driver.py +17 -13
  16. griptape_nodes/exe_types/node_types.py +219 -14
  17. griptape_nodes/exe_types/param_components/__init__.py +1 -0
  18. griptape_nodes/exe_types/param_components/execution_status_component.py +138 -0
  19. griptape_nodes/machines/control_flow.py +129 -92
  20. griptape_nodes/machines/dag_builder.py +207 -0
  21. griptape_nodes/machines/parallel_resolution.py +264 -276
  22. griptape_nodes/machines/sequential_resolution.py +9 -7
  23. griptape_nodes/node_library/library_registry.py +34 -1
  24. griptape_nodes/retained_mode/events/app_events.py +5 -1
  25. griptape_nodes/retained_mode/events/base_events.py +7 -7
  26. griptape_nodes/retained_mode/events/config_events.py +30 -0
  27. griptape_nodes/retained_mode/events/execution_events.py +2 -2
  28. griptape_nodes/retained_mode/events/model_events.py +296 -0
  29. griptape_nodes/retained_mode/griptape_nodes.py +10 -1
  30. griptape_nodes/retained_mode/managers/agent_manager.py +14 -0
  31. griptape_nodes/retained_mode/managers/config_manager.py +44 -3
  32. griptape_nodes/retained_mode/managers/event_manager.py +8 -2
  33. griptape_nodes/retained_mode/managers/flow_manager.py +45 -14
  34. griptape_nodes/retained_mode/managers/library_manager.py +3 -3
  35. griptape_nodes/retained_mode/managers/model_manager.py +1107 -0
  36. griptape_nodes/retained_mode/managers/node_manager.py +26 -26
  37. griptape_nodes/retained_mode/managers/object_manager.py +1 -1
  38. griptape_nodes/retained_mode/managers/os_manager.py +6 -6
  39. griptape_nodes/retained_mode/managers/settings.py +87 -9
  40. griptape_nodes/retained_mode/managers/static_files_manager.py +77 -9
  41. griptape_nodes/retained_mode/managers/sync_manager.py +10 -5
  42. griptape_nodes/retained_mode/managers/workflow_manager.py +101 -92
  43. griptape_nodes/retained_mode/retained_mode.py +19 -0
  44. griptape_nodes/servers/__init__.py +1 -0
  45. griptape_nodes/{mcp_server/server.py → servers/mcp.py} +1 -1
  46. griptape_nodes/{app/api.py → servers/static.py} +43 -40
  47. griptape_nodes/traits/button.py +124 -6
  48. griptape_nodes/traits/multi_options.py +188 -0
  49. griptape_nodes/traits/numbers_selector.py +77 -0
  50. griptape_nodes/traits/options.py +93 -2
  51. griptape_nodes/utils/async_utils.py +31 -0
  52. {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.1.dist-info}/METADATA +3 -1
  53. {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.1.dist-info}/RECORD +56 -47
  54. {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.1.dist-info}/WHEEL +1 -1
  55. /griptape_nodes/{mcp_server → servers}/ws_request_manager.py +0 -0
  56. {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.1.dist-info}/entry_points.txt +0 -0
@@ -357,9 +357,9 @@ class NodeManager:
         else:
             details = f"Successfully created Node '{final_node_name}' in Flow '{parent_flow_name}'"

-        log_level = "DEBUG"
+        log_level = logging.DEBUG
         if remapped_requested_node_name:
-            log_level = "WARNING"
+            log_level = logging.WARNING
             details = f"{details}. WARNING: Had to rename from original node name requested '{request.node_name}' as an object with this name already existed."

         # Special handling for paired classes (e.g., create a Start node and it automatically creates a corresponding End node already connected).
@@ -445,28 +445,27 @@ class NodeManager:
         # get the current node executing / resolving
         # if it's in connected nodes, cancel flow.
         # otherwise, leave it.
-        control_node_name, resolving_node_name = GriptapeNodes.FlowManager().flow_state(parent_flow)
+        control_node_names, resolving_node_names = GriptapeNodes.FlowManager().flow_state(parent_flow)
         connected_nodes = parent_flow.get_all_connected_nodes(node)
         cancelled = False
-        if control_node_name is not None:
-            control_node = GriptapeNodes.ObjectManager().get_object_by_name(control_node_name)
-            if control_node in connected_nodes:
-                result = GriptapeNodes.handle_request(CancelFlowRequest(flow_name=parent_flow_name))
-                cancelled = True
-                if not result.succeeded():
-                    details = (
-                        f"Attempted to delete a Node '{node.name}'. Failed because running flow could not cancel."
-                    )
-                    return DeleteNodeResultFailure(result_details=details)
-        if resolving_node_name is not None and not cancelled:
-            resolving_node = GriptapeNodes.ObjectManager().get_object_by_name(resolving_node_name)
-            if resolving_node in connected_nodes:
-                result = GriptapeNodes.handle_request(CancelFlowRequest(flow_name=parent_flow_name))
-                if not result.succeeded():
-                    details = (
-                        f"Attempted to delete a Node '{node.name}'. Failed because running flow could not cancel."
-                    )
-                    return DeleteNodeResultFailure(result_details=details)
+        if control_node_names is not None:
+            for control_node_name in control_node_names:
+                control_node = GriptapeNodes.ObjectManager().get_object_by_name(control_node_name)
+                if control_node in connected_nodes:
+                    result = GriptapeNodes.handle_request(CancelFlowRequest(flow_name=parent_flow_name))
+                    cancelled = True
+                    if not result.succeeded():
+                        details = f"Attempted to delete a Node '{node.name}'. Failed because running flow could not cancel."
+                        return DeleteNodeResultFailure(result_details=details)
+        if resolving_node_names is not None and not cancelled:
+            for resolving_node_name in resolving_node_names:
+                resolving_node = GriptapeNodes.ObjectManager().get_object_by_name(resolving_node_name)
+                if resolving_node in connected_nodes:
+                    result = GriptapeNodes.handle_request(CancelFlowRequest(flow_name=parent_flow_name))
+                    if not result.succeeded():
+                        details = f"Attempted to delete a Node '{node.name}'. Failed because running flow could not cancel."
+                        return DeleteNodeResultFailure(result_details=details)
+                    break  # Only need to cancel once
         # Clear the execution queue, because we don't want to hit this node eventually.
         parent_flow.clear_execution_queue()
         return None
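
Note on the hunk above: FlowManager.flow_state() now returns collections of control and resolving node names rather than a single name of each, and the deletion path walks them until one cancel is issued. A small, self-contained sketch of that list-based contract; the helper and example names are invented for illustration, not taken from the codebase:

    def pick_node_to_cancel(control_node_names, resolving_node_names, connected_node_names):
        """Hypothetical condensation of the loops above: find the first executing or
        resolving node that is connected to the node being deleted."""
        candidates = list(control_node_names or []) + list(resolving_node_names or [])
        return next((name for name in candidates if name in connected_node_names), None)

    # Example: the resolving node "image_loader" is wired to the node being deleted,
    # so the flow would be cancelled once on its behalf.
    assert pick_node_to_cancel([], ["image_loader"], {"image_loader", "resize"}) == "image_loader"
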
@@ -1326,7 +1325,7 @@ class NodeManager:
         # Early return with warning - we're just preserving the original changes
         details = f"Parameter '{request.parameter_name}' alteration recorded for ErrorProxyNode '{node_name}'. Original node '{node.original_node_type}' had loading errors - preserving changes for correct recreation when dependency '{node.original_library_name}' is resolved."

-        result_details = ResultDetails(message=details, level="WARNING")
+        result_details = ResultDetails(message=details, level=logging.WARNING)
         return AlterParameterDetailsResultSuccess(result_details=result_details)

         # Reject runtime parameter alterations on ErrorProxy
@@ -1359,7 +1358,7 @@ class NodeManager:
             # TODO: https://github.com/griptape-ai/griptape-nodes/issues/826
             details = f"Attempted to alter details for Element '{request.parameter_name}' from Node '{node_name}'. Could only alter some values because the Element was not user-defined (i.e., critical to the Node implementation). Only user-defined Elements can be totally modified from a Node."
             return AlterParameterDetailsResultSuccess(
-                result_details=ResultDetails(message=details, level="WARNING")
+                result_details=ResultDetails(message=details, level=logging.WARNING)
             )
         self.modify_key_parameter_fields(request, element)

@@ -1521,8 +1520,9 @@ class NodeManager:
             return result

         # Validate that parameters can be set at all (note: we want the value to be set during initial setup, but not after)
-        # This check comes after before_value_set to allow nodes to temporarily modify settable state
-        if not parameter.settable and not request.initial_setup:
+        # We skip this if it's a passthru from a connection or if we're on initial setup; those always trump settable.
+        # This check comes *AFTER* before_value_set() to allow nodes to temporarily modify settable state
+        if not parameter.settable and not incoming_node_set and not request.initial_setup:
             details = f"Attempted to set parameter value for '{node_name}.{request.parameter_name}'. Failed because that Parameter was flagged as not settable."
             result = SetParameterValueResultFailure(result_details=details)
             return result
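
The reworked guard above has three ways to allow a write: the parameter is settable, the value is being pushed through a connection (incoming_node_set), or the workflow is in initial setup. A one-function restatement of that precedence; the helper name is invented for illustration:

    def may_set_parameter_value(settable: bool, incoming_node_set: bool, initial_setup: bool) -> bool:
        # Hypothetical condensation of the guard above: connection passthrough and
        # initial setup always trump a non-settable parameter.
        return settable or incoming_node_set or initial_setup

    assert may_set_parameter_value(False, True, False)        # value arriving via a connection
    assert may_set_parameter_value(False, False, True)        # initial workflow load
    assert not may_set_parameter_value(False, False, False)   # rejected: not settable
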
@@ -102,7 +102,7 @@ class ObjectManager:
             details += " WARNING: Originally requested the name '{request.requested_name}', but that was taken."
             log_level = logging.WARNING
         if log_level == logging.WARNING:
-            result_details = ResultDetails(message=details, level="WARNING")
+            result_details = ResultDetails(message=details, level=logging.WARNING)
         else:
             result_details = details
         return RenameObjectResultSuccess(final_name=final_name, result_details=result_details)
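
This hunk is one instance of a pattern repeated across node_manager.py, os_manager.py, and sync_manager.py in this release: ResultDetails(message=..., level=...) now receives the logging module's numeric constants instead of level-name strings. A minimal illustration of the values involved (ResultDetails itself is omitted here because its import path is not shown in this diff):

    import logging

    # The numeric constants that now travel in ResultDetails.level:
    assert logging.DEBUG == 10 and logging.INFO == 20 and logging.WARNING == 30

    # Anything still holding a legacy level string can be converted in one call:
    assert logging.getLevelName("WARNING") == logging.WARNING
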
@@ -1,4 +1,5 @@
 import base64
+import logging
 import mimetypes
 import os
 import shutil
@@ -516,15 +517,14 @@ class OSManager:

         # Check if file is already in the static files directory
         config_manager = GriptapeNodes.ConfigManager()
-        static_files_directory = config_manager.get_config_value("static_files_directory", default="staticfiles")
-        static_dir = config_manager.workspace_path / static_files_directory
+        static_dir = config_manager.workspace_path

         try:
             # Check if file is within the static files directory
             file_relative_to_static = file_path.relative_to(static_dir)
             # File is in static directory, construct URL directly
-            static_url = f"http://localhost:8124/static/{file_relative_to_static}"
-            msg = f"Image already in static directory, returning URL: {static_url}"
+            static_url = f"http://localhost:8124/workspace/{file_relative_to_static}"
+            msg = f"Image already in workspace directory, returning URL: {static_url}"
             logger.debug(msg)
         except ValueError:
             # File is not in static directory, create small preview
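
The hunk above also changes how files that already live under the workspace are addressed: they are served from a /workspace/ route keyed by the path relative to the workspace root, rather than from /static/. A sketch of the resulting URL shape, with an invented workspace path and file name (the host and port mirror the diff):

    from pathlib import Path

    workspace = Path("/home/user/GriptapeNodes")        # hypothetical workspace_path
    file_path = workspace / "outputs" / "render.png"    # hypothetical file inside it

    relative = file_path.relative_to(workspace)          # raises ValueError if outside the workspace
    url = f"http://localhost:8124/workspace/{relative}"
    assert url == "http://localhost:8124/workspace/outputs/render.png"
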
@@ -767,7 +767,7 @@ class OSManager:
         if file_path.exists():
             msg = f"Path already exists: {file_path}"
             return CreateFileResultSuccess(
-                created_path=str(file_path), result_details=ResultDetails(message=msg, level="WARNING")
+                created_path=str(file_path), result_details=ResultDetails(message=msg, level=logging.WARNING)
             )

         # Create parent directories if needed
@@ -836,7 +836,7 @@ class OSManager:
             return RenameFileResultSuccess(
                 old_path=str(old_path),
                 new_path=str(new_path),
-                result_details=ResultDetails(message=details, level="INFO"),
+                result_details=ResultDetails(message=details, level=logging.INFO),
             )

         except Exception as e:
@@ -2,7 +2,41 @@ from enum import StrEnum
 from pathlib import Path
 from typing import Any, Literal

-from pydantic import BaseModel, ConfigDict, Field, field_validator
+from pydantic import BaseModel, ConfigDict, field_validator
+from pydantic import Field as PydanticField
+
+
+class Category(BaseModel):
+    """A category with name and optional description."""
+
+    name: str
+    description: str | None = None
+
+    def __str__(self) -> str:
+        return self.name
+
+
+# Predefined categories to avoid repetition
+FILE_SYSTEM = Category(name="File System", description="Directories and file paths for the application")
+APPLICATION_EVENTS = Category(name="Application Events", description="Configuration for application lifecycle events")
+API_KEYS = Category(name="API Keys", description="API keys and authentication credentials")
+EXECUTION = Category(name="Execution", description="Workflow execution and processing settings")
+STORAGE = Category(name="Storage", description="Data storage and persistence configuration")
+SYSTEM_REQUIREMENTS = Category(name="System Requirements", description="System resource requirements and limits")
+
+
+def Field(category: str | Category = "General", **kwargs) -> Any:
+    """Enhanced Field with default category that can be overridden."""
+    if "json_schema_extra" not in kwargs:
+        # Convert Category to dict or use string directly
+        if isinstance(category, Category):
+            category_dict = {"name": category.name}
+            if category.description:
+                category_dict["description"] = category.description
+            kwargs["json_schema_extra"] = {"category": category_dict}
+        else:
+            kwargs["json_schema_extra"] = {"category": category}
+    return PydanticField(**kwargs)


 class WorkflowExecutionMode(StrEnum):
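
The new Field wrapper above is a thin shim over Pydantic's Field that stashes a category (a plain string or a Category model) into json_schema_extra so the settings UI can group fields. A hedged usage sketch, assuming the Field, Category, and FILE_SYSTEM definitions from this hunk are in scope; the ExampleSettings model is ours:

    from pydantic import BaseModel

    class ExampleSettings(BaseModel):
        # Category instances surface as {"name": ..., "description": ...} in the JSON schema;
        # plain strings pass through unchanged; omitting category falls back to "General".
        workspace_directory: str = Field(category=FILE_SYSTEM, default="GriptapeNodes")
        theme: str = Field(default="dark")

    schema = ExampleSettings.model_json_schema()
    assert schema["properties"]["workspace_directory"]["category"]["name"] == "File System"
    assert schema["properties"]["theme"]["category"] == "General"
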
@@ -12,9 +46,20 @@ class WorkflowExecutionMode(StrEnum):
     PARALLEL = "parallel"


+class LogLevel(StrEnum):
+    """Logging level for the application."""
+
+    CRITICAL = "CRITICAL"
+    ERROR = "ERROR"
+    WARNING = "WARNING"
+    INFO = "INFO"
+    DEBUG = "DEBUG"
+
+
 class AppInitializationComplete(BaseModel):
     libraries_to_register: list[str] = Field(default_factory=list)
     workflows_to_register: list[str] = Field(default_factory=list)
+    models_to_download: list[str] = Field(default_factory=list)


 class AppEvents(BaseModel):
@@ -49,17 +94,26 @@ class AppEvents(BaseModel):
 class Settings(BaseModel):
     model_config = ConfigDict(extra="allow")

-    workspace_directory: str = Field(default=str(Path().cwd() / "GriptapeNodes"))
+    workspace_directory: str = Field(
+        category=FILE_SYSTEM,
+        default=str(Path().cwd() / "GriptapeNodes"),
+    )
     static_files_directory: str = Field(
+        category=FILE_SYSTEM,
         default="staticfiles",
         description="Path to the static files directory, relative to the workspace directory.",
     )
     sandbox_library_directory: str = Field(
+        category=FILE_SYSTEM,
         default="sandbox_library",
         description="Path to the sandbox library directory (useful while developing nodes). If presented as just a directory (e.g., 'sandbox_library') it will be interpreted as being relative to the workspace directory.",
     )
-    app_events: AppEvents = Field(default_factory=AppEvents)
+    app_events: AppEvents = Field(
+        category=APPLICATION_EVENTS,
+        default_factory=AppEvents,
+    )
     nodes: dict[str, Any] = Field(
+        category=API_KEYS,
         default_factory=lambda: {
             "Griptape": {"GT_CLOUD_API_KEY": "$GT_CLOUD_API_KEY"},
             "OpenAI": {"OPENAI_API_KEY": "$OPENAI_API_KEY"},
@@ -94,11 +148,13 @@ class Settings(BaseModel):
             },
             "Tavily": {"TAVILY_API_KEY": "$TAVILY_API_KEY"},
             "Serper": {"SERPER_API_KEY": "$SERPER_API_KEY"},
-        }
+        },
     )
-    log_level: str = Field(default="INFO")
+    log_level: LogLevel = Field(category=EXECUTION, default=LogLevel.INFO)
     workflow_execution_mode: WorkflowExecutionMode = Field(
-        default=WorkflowExecutionMode.SEQUENTIAL, description="Workflow execution mode for node processing"
+        category=EXECUTION,
+        default=WorkflowExecutionMode.SEQUENTIAL,
+        description="Workflow execution mode for node processing",
     )

     @field_validator("workflow_execution_mode", mode="before")
@@ -117,18 +173,40 @@ class Settings(BaseModel):
         # Return default for any other type
         return WorkflowExecutionMode.SEQUENTIAL

+    @field_validator("log_level", mode="before")
+    @classmethod
+    def validate_log_level(cls, v: Any) -> LogLevel:
+        """Convert string values to LogLevel enum."""
+        if isinstance(v, str):
+            try:
+                return LogLevel(v.upper())
+            except ValueError:
+                # Return default if invalid string
+                return LogLevel.INFO
+        elif isinstance(v, LogLevel):
+            return v
+        else:
+            # Return default for any other type
+            return LogLevel.INFO
+
     max_nodes_in_parallel: int | None = Field(
-        default=5, description="Maximum number of nodes executing at a time for parallel execution."
+        category=EXECUTION,
+        default=5,
+        description="Maximum number of nodes executing at a time for parallel execution.",
     )
-    storage_backend: Literal["local", "gtc"] = Field(default="local")
+    storage_backend: Literal["local", "gtc"] = Field(category=STORAGE, default="local")
     minimum_disk_space_gb_libraries: float = Field(
+        category=SYSTEM_REQUIREMENTS,
         default=10.0,
         description="Minimum disk space in GB required for library installation and virtual environment operations",
     )
     minimum_disk_space_gb_workflows: float = Field(
-        default=1.0, description="Minimum disk space in GB required for saving workflows"
+        category=SYSTEM_REQUIREMENTS,
+        default=1.0,
+        description="Minimum disk space in GB required for saving workflows",
     )
     synced_workflows_directory: str = Field(
+        category=FILE_SYSTEM,
         default="synced_workflows",
         description="Path to the synced workflows directory, relative to the workspace directory.",
     )
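
With the LogLevel enum and the mode="before" validator above, log_level now accepts case-insensitive strings from existing config files and silently falls back to INFO on anything unrecognized. A short sketch of the behavior the validator implies, assuming the remaining Settings fields keep their defaults:

    # Strings are upper-cased and coerced into the enum.
    assert Settings(log_level="debug").log_level is LogLevel.DEBUG

    # Unknown strings and wrongly-typed values fall back to INFO rather than raising.
    assert Settings(log_level="verbose").log_level is LogLevel.INFO
    assert Settings(log_level=42).log_level is LogLevel.INFO
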
@@ -1,6 +1,8 @@
 import base64
 import binascii
 import logging
+import threading
+from pathlib import Path

 import httpx
 from xdg_base_dirs import xdg_config_home
@@ -8,6 +10,7 @@ from xdg_base_dirs import xdg_config_home
 from griptape_nodes.drivers.storage import StorageBackend
 from griptape_nodes.drivers.storage.griptape_cloud_storage_driver import GriptapeCloudStorageDriver
 from griptape_nodes.drivers.storage.local_storage_driver import LocalStorageDriver
+from griptape_nodes.retained_mode.events.app_events import AppInitializationComplete
 from griptape_nodes.retained_mode.events.static_file_events import (
     CreateStaticFileDownloadUrlRequest,
     CreateStaticFileDownloadUrlResultFailure,
@@ -22,6 +25,7 @@ from griptape_nodes.retained_mode.events.static_file_events import (
 from griptape_nodes.retained_mode.managers.config_manager import ConfigManager
 from griptape_nodes.retained_mode.managers.event_manager import EventManager
 from griptape_nodes.retained_mode.managers.secrets_manager import SecretsManager
+from griptape_nodes.servers.static import start_static_server

 logger = logging.getLogger("griptape_nodes")

@@ -46,9 +50,10 @@ class StaticFilesManager:
         """
         self.config_manager = config_manager

-        storage_backend = config_manager.get_config_value("storage_backend", default=StorageBackend.LOCAL)
+        self.storage_backend = config_manager.get_config_value("storage_backend", default=StorageBackend.LOCAL)
+        workspace_directory = Path(config_manager.get_config_value("workspace_directory"))

-        match storage_backend:
+        match self.storage_backend:
             case StorageBackend.GTC:
                 bucket_id = secrets_manager.get_secret("GT_CLOUD_BUCKET_ID", should_error_on_not_found=False)

@@ -56,20 +61,21 @@ class StaticFilesManager:
                     logger.warning(
                         "GT_CLOUD_BUCKET_ID secret is not available, falling back to local storage. Run `gtn init` to set it up."
                     )
-                    self.storage_driver = LocalStorageDriver()
+                    self.storage_driver = LocalStorageDriver(workspace_directory)
                 else:
                     static_files_directory = config_manager.get_config_value(
                         "static_files_directory", default="staticfiles"
                     )
                     self.storage_driver = GriptapeCloudStorageDriver(
+                        workspace_directory,
                         bucket_id=bucket_id,
                         api_key=secrets_manager.get_secret("GT_CLOUD_API_KEY"),
                         static_files_directory=static_files_directory,
                     )
             case StorageBackend.LOCAL:
-                self.storage_driver = LocalStorageDriver()
+                self.storage_driver = LocalStorageDriver(workspace_directory)
             case _:
-                msg = f"Invalid storage backend: {storage_backend}"
+                msg = f"Invalid storage backend: {self.storage_backend}"
                 raise ValueError(msg)

         if event_manager is not None:
@@ -82,6 +88,11 @@ class StaticFilesManager:
             event_manager.assign_manager_to_request_type(
                 CreateStaticFileDownloadUrlRequest, self.on_handle_create_static_file_download_url_request
             )
+            event_manager.add_listener_to_app_event(
+                AppInitializationComplete,
+                self.on_app_initialization_complete,
+            )
+            # TODO: Listen for shutdown event (https://github.com/griptape-ai/griptape-nodes/issues/2149) to stop static server

     def on_handle_create_static_file_request(
         self,
@@ -118,8 +129,12 @@ class StaticFilesManager:
             A result object indicating success or failure.
         """
         file_name = request.file_name
+
+        resolved_directory = self._get_static_files_directory()
+        full_file_path = Path(resolved_directory) / file_name
+
         try:
-            response = self.storage_driver.create_signed_upload_url(file_name)
+            response = self.storage_driver.create_signed_upload_url(full_file_path)
         except ValueError as e:
             msg = f"Failed to create presigned URL for file {file_name}: {e}"
             logger.error(msg)
@@ -145,8 +160,12 @@ class StaticFilesManager:
             A result object indicating success or failure.
         """
         file_name = request.file_name
+
+        resolved_directory = self._get_static_files_directory()
+        full_file_path = Path(resolved_directory) / file_name
+
         try:
-            url = self.storage_driver.create_signed_download_url(file_name)
+            url = self.storage_driver.create_signed_download_url(full_file_path)
         except ValueError as e:
             msg = f"Failed to create presigned URL for file {file_name}: {e}"
             logger.error(msg)
@@ -156,6 +175,11 @@ class StaticFilesManager:
             url=url, result_details="Successfully created static file download URL"
         )

+    def on_app_initialization_complete(self, _payload: AppInitializationComplete) -> None:
+        # Start static server in daemon thread if enabled
+        if self.storage_backend == StorageBackend.LOCAL:
+            threading.Thread(target=start_static_server, daemon=True, name="static-server").start()
+
     def save_static_file(self, data: bytes, file_name: str) -> str:
         """Saves a static file to the workspace directory.

@@ -168,7 +192,10 @@ class StaticFilesManager:
         Returns:
             The URL of the saved file.
         """
-        response = self.storage_driver.create_signed_upload_url(file_name)
+        resolved_directory = self._get_static_files_directory()
+        file_path = Path(resolved_directory) / file_name
+
+        response = self.storage_driver.create_signed_upload_url(file_path)

         try:
             response = httpx.request(
@@ -180,6 +207,47 @@ class StaticFilesManager:
             logger.error(msg)
             raise ValueError(msg) from e

-        url = self.storage_driver.create_signed_download_url(file_name)
+        url = self.storage_driver.create_signed_download_url(file_path)

         return url
+
+    def _get_static_files_directory(self) -> str:
+        """Get the appropriate static files directory based on the current workflow context.
+
+        Returns:
+            The directory path to use for static files, relative to the workspace directory.
+            If a workflow is active, returns the staticfiles subdirectory within the
+            workflow's directory relative to workspace. Otherwise, returns the staticfiles
+            subdirectory relative to workspace.
+        """
+        from griptape_nodes.node_library.workflow_registry import WorkflowRegistry
+        from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
+
+        workspace_path = self.config_manager.workspace_path
+        static_files_subdir = self.config_manager.get_config_value("static_files_directory", default="staticfiles")
+
+        # Check if there's an active workflow context
+        context_manager = GriptapeNodes.ContextManager()
+        if context_manager.has_current_workflow():
+            try:
+                # Get the current workflow name and its file path
+                workflow_name = context_manager.get_current_workflow_name()
+                workflow = WorkflowRegistry.get_workflow_by_name(workflow_name)
+
+                # Get the directory containing the workflow file
+                workflow_file_path = Path(WorkflowRegistry.get_complete_file_path(workflow.file_path))
+                workflow_directory = workflow_file_path.parent
+
+                # Make the workflow directory relative to workspace
+                relative_workflow_dir = workflow_directory.relative_to(workspace_path)
+                return str(relative_workflow_dir / static_files_subdir)
+
+            except (KeyError, AttributeError) as e:
+                # If anything goes wrong getting workflow info, fall back to workspace-relative
+                logger.warning("Failed to get workflow directory for static files, using workspace: %s", e)
+            except ValueError as e:
+                # If workflow directory is not within workspace, fall back to workspace-relative
+                logger.warning("Workflow directory is outside workspace, using workspace-relative static files: %s", e)
+
+        # If no workflow context or workflow lookup failed, return just the static files subdirectory
+        return static_files_subdir
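
Net effect of _get_static_files_directory(): when a workflow is active, uploads and signed URLs are scoped to a staticfiles subdirectory next to that workflow; otherwise they fall back to the workspace-level staticfiles directory. A hedged example of the returned paths, with invented workspace and workflow names:

    from pathlib import Path

    workspace = Path("/home/user/GriptapeNodes")   # hypothetical workspace_path
    static_subdir = "staticfiles"                   # the configured default

    # No active workflow: files go to <workspace>/staticfiles.
    no_workflow = static_subdir

    # Active workflow stored under <workspace>/my_workflow/:
    workflow_dir = workspace / "my_workflow"
    with_workflow = str(workflow_dir.relative_to(workspace) / static_subdir)
    assert with_workflow == "my_workflow/staticfiles"
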
@@ -133,7 +133,9 @@ class SyncManager:
             return StartSyncAllCloudWorkflowsResultSuccess(
                 sync_directory=str(sync_dir),
                 total_workflows=0,
-                result_details=ResultDetails(message="No workflow files found in cloud storage.", level="INFO"),
+                result_details=ResultDetails(
+                    message="No workflow files found in cloud storage.", level=logging.INFO
+                ),
             )

         # Start background sync with unique ID
@@ -209,7 +211,10 @@ class SyncManager:
             msg = "Cloud storage api_key not configured. Set GT_CLOUD_API_KEY secret."
             raise RuntimeError(msg)

+        workspace_directory = Path(self._config_manager.get_config_value("workspace_directory"))
+
         return GriptapeCloudStorageDriver(
+            workspace_directory,
             bucket_id=bucket_id,
             base_url=base_url,
             api_key=api_key,
@@ -232,7 +237,7 @@ class SyncManager:
         sync_dir = self._sync_dir

         # Download file content from cloud
-        file_content = storage_driver.download_file(filename)
+        file_content = storage_driver.download_file(Path(filename))

         # Write to local sync directory
         local_file_path = sync_dir / filename
@@ -283,7 +288,7 @@ class SyncManager:

             # Upload to cloud storage using the upload_file method
             filename = file_path.name
-            storage_driver.upload_file(filename, file_content)
+            storage_driver.upload_file(Path(filename), file_content)

             logger.info("Successfully uploaded workflow file to cloud: %s", filename)

@@ -301,7 +306,7 @@ class SyncManager:
             filename = file_path.name

             # Use the storage driver's delete method
-            storage_driver.delete_file(filename)
+            storage_driver.delete_file(Path(filename))
             logger.info("Successfully deleted workflow file from cloud: %s", filename)

         except Exception as e:
@@ -388,7 +393,7 @@ class SyncManager:
         """
         try:
             # Download file content
-            file_content = storage_driver.download_file(file_name)
+            file_content = storage_driver.download_file(Path(file_name))

             # Extract just the filename (remove any directory prefixes)
             local_filename = Path(file_name).name
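
Across the SyncManager hunks here and the StaticFilesManager hunks above, the storage driver methods (upload_file, download_file, delete_file, create_signed_upload_url, create_signed_download_url) are now called with pathlib.Path arguments instead of bare filename strings, and both drivers take the workspace directory as their first constructor argument, consistent with the reworked base_storage_driver.py (+40/-20 in the file list). A hedged sketch of the adjusted call pattern; only the calls visible in this diff are assumed, and the file name is invented:

    from pathlib import Path

    from griptape_nodes.drivers.storage.local_storage_driver import LocalStorageDriver

    driver = LocalStorageDriver(Path("/home/user/GriptapeNodes"))  # workspace directory, per the diff

    file_path = Path("staticfiles") / "render.png"                 # invented example file
    upload_response = driver.create_signed_upload_url(file_path)
    download_url = driver.create_signed_download_url(file_path)
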