griptape-nodes 0.55.1__py3-none-any.whl → 0.56.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- griptape_nodes/cli/commands/init.py +88 -0
- griptape_nodes/cli/commands/models.py +2 -0
- griptape_nodes/cli/shared.py +1 -0
- griptape_nodes/exe_types/core_types.py +104 -0
- griptape_nodes/exe_types/node_types.py +9 -12
- griptape_nodes/machines/control_flow.py +10 -0
- griptape_nodes/machines/dag_builder.py +21 -2
- griptape_nodes/machines/parallel_resolution.py +25 -10
- griptape_nodes/node_library/workflow_registry.py +73 -3
- griptape_nodes/retained_mode/events/execution_events.py +12 -2
- griptape_nodes/retained_mode/events/flow_events.py +58 -0
- griptape_nodes/retained_mode/events/resource_events.py +290 -0
- griptape_nodes/retained_mode/events/workflow_events.py +57 -2
- griptape_nodes/retained_mode/griptape_nodes.py +9 -1
- griptape_nodes/retained_mode/managers/flow_manager.py +678 -12
- griptape_nodes/retained_mode/managers/library_manager.py +13 -19
- griptape_nodes/retained_mode/managers/model_manager.py +184 -83
- griptape_nodes/retained_mode/managers/node_manager.py +3 -3
- griptape_nodes/retained_mode/managers/os_manager.py +118 -1
- griptape_nodes/retained_mode/managers/resource_components/__init__.py +1 -0
- griptape_nodes/retained_mode/managers/resource_components/capability_field.py +41 -0
- griptape_nodes/retained_mode/managers/resource_components/comparator.py +18 -0
- griptape_nodes/retained_mode/managers/resource_components/resource_instance.py +236 -0
- griptape_nodes/retained_mode/managers/resource_components/resource_type.py +79 -0
- griptape_nodes/retained_mode/managers/resource_manager.py +306 -0
- griptape_nodes/retained_mode/managers/resource_types/__init__.py +1 -0
- griptape_nodes/retained_mode/managers/resource_types/cpu_resource.py +108 -0
- griptape_nodes/retained_mode/managers/resource_types/os_resource.py +87 -0
- griptape_nodes/retained_mode/managers/settings.py +5 -0
- griptape_nodes/retained_mode/managers/sync_manager.py +10 -3
- griptape_nodes/retained_mode/managers/workflow_manager.py +359 -261
- {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.0.dist-info}/METADATA +1 -1
- {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.0.dist-info}/RECORD +35 -25
- {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.0.dist-info}/WHEEL +1 -1
- {griptape_nodes-0.55.1.dist-info → griptape_nodes-0.56.0.dist-info}/entry_points.txt +0 -0

griptape_nodes/retained_mode/managers/library_manager.py
@@ -2041,32 +2041,26 @@ class LibraryManager:
         config_mgr = GriptapeNodes.ConfigManager()
         user_libraries_section = "app_events.on_app_initialization_complete.libraries_to_register"

-
-
-        # Add from config
-        config_libraries = config_mgr.get_config_value(user_libraries_section, default=[])
-        libraries_to_process.extend(config_libraries)
-
-        # Add from workspace - recursive discovery of library JSON files
-        workspace_path = config_mgr.workspace_path
-        libraries_to_process.append(str(workspace_path))
-
-        library_files = []
+        discovered_libraries = set()

         def process_path(path: Path) -> None:
             """Process a path, handling both files and directories."""
             if path.is_dir():
                 # Process all library JSON files recursively in the directory
-
+                discovered_libraries.update(path.rglob(LibraryManager.LIBRARY_CONFIG_FILENAME))
             elif path.suffix == ".json":
-
+                discovered_libraries.add(path)

-        #
-
-
-
-        # Handle library config files and directories only (skip requirement specifiers)
+        # Add from config
+        config_libraries = config_mgr.get_config_value(user_libraries_section, default=[])
+        for library_path_str in config_libraries:
+            library_path = Path(library_path_str)
             if library_path.exists():
                 process_path(library_path)

-
+        # Add from workspace - recursive discovery of library JSON files
+        workspace_path = config_mgr.workspace_path
+        if workspace_path.exists():
+            process_path(workspace_path)
+
+        return list(discovered_libraries)

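The library discovery rewrite above collects library JSON files into a set so each file is registered only once, whether it comes from the config list or from a recursive workspace scan. A minimal standalone sketch of the same pattern, assuming an illustrative config filename in place of the real `LibraryManager.LIBRARY_CONFIG_FILENAME` value:

from pathlib import Path

LIBRARY_CONFIG_FILENAME = "griptape_nodes_library.json"  # assumed name, for illustration only


def discover_libraries(config_paths: list[str], workspace: Path) -> list[Path]:
    """Collect library config files from explicit config entries and the workspace tree."""
    discovered: set[Path] = set()

    def process_path(path: Path) -> None:
        if path.is_dir():
            # Recursively pick up every library config file under a directory
            discovered.update(path.rglob(LIBRARY_CONFIG_FILENAME))
        elif path.suffix == ".json":
            discovered.add(path)

    for entry in config_paths:
        candidate = Path(entry)
        if candidate.exists():
            process_path(candidate)

    if workspace.exists():
        process_path(workspace)

    # The set guarantees a library file listed in config and found in the workspace is returned once
    return list(discovered)
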
griptape_nodes/retained_mode/managers/model_manager.py
@@ -65,6 +65,17 @@ class SearchResultsData:
     query_info: QueryInfo


+@dataclass
+class DownloadParams:
+    """Data class for model download parameters."""
+
+    model_id: str
+    local_dir: str | None = None
+    revision: str | None = None
+    allow_patterns: list[str] | None = None
+    ignore_patterns: list[str] | None = None
+
+
 class ModelDownloadTracker(tqdm):
     """Custom tqdm progress bar that tracks aggregate model download progress."""

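The new DownloadParams dataclass groups the arguments that download helpers previously passed around as a loose dict. A hedged usage sketch, with a re-declared copy of the dataclass so it runs standalone and purely illustrative field values:

from dataclasses import dataclass


@dataclass
class DownloadParams:
    """Grouped download arguments (mirrors the dataclass added in this release)."""

    model_id: str
    local_dir: str | None = None
    revision: str | None = None
    allow_patterns: list[str] | None = None
    ignore_patterns: list[str] | None = None


# Example: restrict a download to weights and config files at a pinned revision
params = DownloadParams(
    model_id="org/example-model",  # illustrative repo id
    revision="main",
    allow_patterns=["*.safetensors", "*.json"],
)
print(params)
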
@@ -129,7 +140,7 @@ class ModelDownloadTracker(tqdm):
         status_file = self._get_status_file_path()
         current_time = datetime.now(UTC).isoformat()

-        logger.
+        logger.debug(
             "ModelDownloadTracker initializing status file: %s (total_files=%s)", status_file, self.total
         )

@@ -146,7 +157,7 @@ class ModelDownloadTracker(tqdm):
             with status_file.open("w") as f:
                 json.dump(data, f, indent=2)

-            logger.
+            logger.debug("ModelDownloadTracker status file initialized successfully")

         except Exception:
             logger.exception("ModelDownloadTracker._init_status_file failed")

@@ -295,7 +306,7 @@ class ModelManager:
     async def on_handle_download_model_request(self, request: DownloadModelRequest) -> ResultPayload:
         """Handle model download requests asynchronously.

-        This method
+        This method downloads models from Hugging Face Hub using the provided parameters.
         It supports both model IDs and full URLs, and can download entire repositories
         or specific files based on the patterns provided.

@@ -303,89 +314,127 @@ class ModelManager:
             request: The download request containing model ID and options

         Returns:
-            ResultPayload: Success result
+            ResultPayload: Success result with download completion or failure with error details
         """
         parsed_model_id = self._parse_model_id(request.model_id)
         if parsed_model_id != request.model_id:
             logger.debug("Parsed model ID '%s' from URL '%s'", parsed_model_id, request.model_id)

         try:
-            download_params =
-
-
-
-
-
-
+            download_params = DownloadParams(
+                model_id=parsed_model_id,
+                local_dir=request.local_dir,
+                revision=request.revision,
+                allow_patterns=request.allow_patterns,
+                ignore_patterns=request.ignore_patterns,
+            )

             task = asyncio.create_task(self._download_model_task(download_params))
             self._download_tasks[parsed_model_id] = task

-
+            await task
+            self._update_download_status_success(parsed_model_id)
+        except asyncio.CancelledError:
+            # Handle task cancellation gracefully
+            logger.info("Download request cancelled for model '%s'", parsed_model_id)

             return DownloadModelResultSuccess(
                 model_id=parsed_model_id,
-                result_details=
+                result_details=f"Successfully downloaded model '{parsed_model_id}'",
             )

         except Exception as e:
-            error_msg = f"Failed to
+            error_msg = f"Failed to download model '{request.model_id}': {e}"
+            # Update status file to mark download as failed due to cancellation
+            self._update_download_status_failure(parsed_model_id, error_msg)
+
             return DownloadModelResultFailure(
                 result_details=error_msg,
                 exception=e,
             )
+        else:
+            return DownloadModelResultSuccess(
+                model_id=parsed_model_id,
+                result_details=f"Successfully downloaded model '{parsed_model_id}'",
+            )
+        finally:
+            # Clean up the task reference
+            if parsed_model_id in self._download_tasks:
+                del self._download_tasks[parsed_model_id]

-
-        """
+    def _get_download_local_path(self, params: DownloadParams) -> str:
+        """Get the local path where the model was downloaded.

         Args:
-
-        """
-        model_id = download_params["model_id"]
-        logger.info("Starting background download for model: %s", model_id)
+            params: Download parameters

-
-
+        Returns:
+            Local path where the model is stored
+        """
+        if params.local_dir:
+            return params.local_dir

-        #
-
-            cmd.extend(["--local-dir", str(download_params["local_dir"])])
-        if download_params.get("revision") and download_params["revision"] != "main":
-            cmd.extend(["--revision", str(download_params["revision"])])
+        # Otherwise, use the HuggingFace cache directory
+        from huggingface_hub import snapshot_download

         try:
-            #
-
-
-
-
+            # Get the path without actually downloading (since it's already downloaded)
+            return snapshot_download(
+                repo_id=params.model_id,
+                repo_type="model",
+                revision=params.revision,
+                local_files_only=True,  # Only check local cache
             )
+        except Exception:
+            # Fallback: construct the expected cache path
+            from huggingface_hub.constants import HF_HUB_CACHE

+            cache_path = Path(HF_HUB_CACHE)
+            return str(cache_path / f"models--{params.model_id.replace('/', '--')}")
+
+    async def _download_model_task(self, download_params: DownloadParams) -> None:
+        """Background task for downloading a model using CLI command.
+
+        Args:
+            download_params: Download parameters
+
+        Returns:
+            str: Local path where the model was downloaded
+
+        Raises:
+            Exception: If download fails
+        """
+        model_id = download_params.model_id
+        logger.info("Starting download for model: %s", model_id)
+
+        # Build CLI command
+        cmd = [sys.executable, "-m", "griptape_nodes", "models", "download", download_params.model_id]
+
+        if download_params.local_dir:
+            cmd.extend(["--local-dir", download_params.local_dir])
+        if download_params.revision and download_params.revision != "main":
+            cmd.extend(["--revision", download_params.revision])
+
+        # Start subprocess
+        process = await asyncio.create_subprocess_exec(
+            *cmd,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE,
+        )
+
+        try:
             # Store process for cancellation
-
-            self._download_processes[model_id] = process
+            self._download_processes[model_id] = process

-
-            stdout, stderr = await process.communicate()
+            stdout, _ = await process.communicate()

             if process.returncode == 0:
                 logger.info("Successfully downloaded model '%s'", model_id)
             else:
-
-
-        except asyncio.CancelledError:
-            logger.info("Download cancelled for model '%s'", model_id)
-            raise
-
-        except Exception:
-            logger.exception("Error downloading model '%s'", model_id)
-
+                raise ValueError(stdout.decode().strip())
         finally:
-            if
-
-                del self._download_tasks[model_id]
-            if model_id in self._download_processes:
-                del self._download_processes[model_id]
+            if model_id in self._download_processes:
+                del self._download_processes[model_id]

     async def on_handle_list_models_request(self, request: ListModelsRequest) -> ResultPayload:  # noqa: ARG002
         """Handle model listing requests asynchronously.

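The new _get_download_local_path resolves an already-downloaded snapshot through huggingface_hub and, failing that, falls back to the hub's models--<org>--<name> cache directory layout. A condensed sketch of that resolution order, with an illustrative repo id:

from pathlib import Path

from huggingface_hub import snapshot_download
from huggingface_hub.constants import HF_HUB_CACHE


def resolve_cached_model_path(repo_id: str, revision: str | None = None) -> str:
    """Return the local path of a model that is expected to already be in the HF cache."""
    try:
        # local_files_only=True never hits the network; it only resolves an existing snapshot
        return snapshot_download(repo_id=repo_id, repo_type="model", revision=revision, local_files_only=True)
    except Exception:
        # Fall back to the conventional cache directory name used by huggingface_hub
        return str(Path(HF_HUB_CACHE) / f"models--{repo_id.replace('/', '--')}")


print(resolve_cached_model_path("org/example-model"))  # illustrative repo id
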
@@ -612,48 +661,28 @@ class ModelManager:
         # Create download tasks for concurrent execution
         download_tasks = []
         for model_id in all_models:
-            task = asyncio.create_task(
+            task = asyncio.create_task(
+                self.on_handle_download_model_request(
+                    DownloadModelRequest(
+                        model_id=model_id,
+                        local_dir=None,
+                        revision="main",
+                        allow_patterns=None,
+                        ignore_patterns=None,
+                    )
+                )
+            )
             download_tasks.append(task)

         # Wait for all downloads to complete
         results = await asyncio.gather(*download_tasks, return_exceptions=True)

         # Log summary of results
-        successful = sum(1 for result in results if not isinstance(result,
+        successful = sum(1 for result in results if not isinstance(result, DownloadModelResultFailure))
         failed = len(results) - successful

         logger.info("Completed automatic model downloads: %d successful, %d failed", successful, failed)

-    async def _download_model_with_logging(self, model_id: str) -> None:
-        """Download a single model with proper logging.
-
-        Args:
-            model_id: The model ID to download
-        """
-        logger.info("Auto-downloading model: %s", model_id)
-
-        # Create download request with default parameters
-        request = DownloadModelRequest(
-            model_id=model_id,
-            local_dir=None,  # Use default cache directory
-            revision="main",
-            allow_patterns=None,
-            ignore_patterns=None,
-        )
-
-        try:
-            # Run the download asynchronously
-            result = await self.on_handle_download_model_request(request)
-
-            if isinstance(result, DownloadModelResultFailure):
-                logger.warning("Failed to auto-download model '%s': %s", model_id, result.result_details)
-            elif not isinstance(result, DownloadModelResultSuccess):
-                logger.warning("Unknown result type for model '%s' download: %s", model_id, type(result))
-
-        except Exception as e:
-            logger.error("Unexpected error auto-downloading model '%s': %s", model_id, e)
-            raise
-
     def _get_status_file_path(self, model_id: str) -> Path:
         """Get the path to the status file for a model.

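The bulk-download path above now routes every model through on_handle_download_model_request and counts failures from the gathered results. A small asyncio sketch of that fan-out pattern, using a stand-in coroutine in place of the real handler (which returns result payloads rather than raising):

import asyncio


async def download(model_id: str) -> str:
    """Stand-in for the real download handler; returns on success, raises on failure."""
    await asyncio.sleep(0)  # pretend work
    return model_id


async def download_all(model_ids: list[str]) -> None:
    tasks = [asyncio.create_task(download(m)) for m in model_ids]
    # return_exceptions=True keeps one failure from cancelling the remaining downloads
    results = await asyncio.gather(*tasks, return_exceptions=True)
    successful = sum(1 for r in results if not isinstance(r, Exception))
    failed = len(results) - successful
    print(f"Completed downloads: {successful} successful, {failed} failed")


asyncio.run(download_all(["org/model-a", "org/model-b"]))  # illustrative model ids
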
@@ -1050,7 +1079,7 @@ class ModelManager:
             task = self._download_tasks[model_id]
             if not task.done():
                 task.cancel()
-                logger.
+                logger.debug("Cancelled active download task for model '%s'", model_id)
             del self._download_tasks[model_id]

             # Delete status file

@@ -1105,3 +1134,75 @@ class ModelManager:

         status_file.unlink()
         return str(status_file)
+
+    def _update_download_status_failure(self, model_id: str, error_message: str) -> None:
+        """Update the status file to mark download as failed.
+
+        Args:
+            model_id: The model ID that failed to download
+            error_message: The error message describing the failure
+        """
+        try:
+            status_file = self._get_status_file_path(model_id)
+
+            if not status_file.exists():
+                logger.warning("Status file does not exist for failed model '%s'", model_id)
+                return
+
+            with status_file.open() as f:
+                data = json.load(f)
+
+            current_time = datetime.now(UTC).isoformat()
+
+            data.update(
+                {
+                    "status": "failed",
+                    "updated_at": current_time,
+                    "failed_at": current_time,
+                    "error_message": error_message,
+                }
+            )
+
+            with status_file.open("w") as f:
+                json.dump(data, f, indent=2)
+
+            logger.debug("Updated status file to 'failed' for model '%s'", model_id)
+
+        except Exception:
+            logger.exception("Failed to update status file for failed model '%s'", model_id)
+
+    def _update_download_status_success(self, model_id: str) -> None:
+        """Update the status file to mark download as completed.
+
+        Args:
+            model_id: The model ID that was successfully downloaded
+            local_path: The local path where the model was downloaded
+        """
+        try:
+            status_file = self._get_status_file_path(model_id)
+
+            if not status_file.exists():
+                logger.warning("Status file does not exist for completed model '%s'", model_id)
+                return
+
+            with status_file.open() as f:
+                data = json.load(f)
+
+            current_time = datetime.now(UTC).isoformat()
+
+            data.update(
+                {
+                    "status": "completed",
+                    "updated_at": current_time,
+                    "completed_at": current_time,
+                    "progress_percent": 100.0,
+                }
+            )
+
+            with status_file.open("w") as f:
+                json.dump(data, f, indent=2)
+
+            logger.debug("Updated status file to 'completed' for model '%s'", model_id)
+
+        except Exception:
+            logger.exception("Failed to update status file for completed model '%s'", model_id)

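Both status helpers follow the same read-modify-write pattern on a per-model JSON status file. A generic sketch of that pattern; the file name and the extra fields passed in the example are illustrative, not taken from the package:

import json
from datetime import UTC, datetime
from pathlib import Path


def mark_status(status_file: Path, status: str, extra: dict | None = None) -> None:
    """Rewrite a JSON status file in place with a new status and timestamp."""
    if not status_file.exists():
        return  # nothing to update

    data = json.loads(status_file.read_text())
    now = datetime.now(UTC).isoformat()
    data.update({"status": status, "updated_at": now, **(extra or {})})
    status_file.write_text(json.dumps(data, indent=2))


# Example: mark a download as failed with an error message
mark_status(Path("model_status.json"), "failed", {"error_message": "disk full"})
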
griptape_nodes/retained_mode/managers/node_manager.py
@@ -446,7 +446,7 @@ class NodeManager:
         # get the current node executing / resolving
         # if it's in connected nodes, cancel flow.
         # otherwise, leave it.
-        control_node_names, resolving_node_names
+        control_node_names, resolving_node_names = GriptapeNodes.FlowManager().flow_state(parent_flow)
         connected_nodes = parent_flow.get_all_connected_nodes(node)
         cancelled = False
         if control_node_names is not None:

@@ -2061,7 +2061,7 @@ class NodeManager:
             # Normal node - use current parameter values
             for parameter in node.parameters:
                 # SetParameterValueRequest event
-                set_param_value_requests = NodeManager.
+                set_param_value_requests = NodeManager.handle_parameter_value_saving(
                     parameter=parameter,
                     node=node,
                     unique_parameter_uuid_to_values=request.unique_parameter_uuid_to_values,

@@ -2482,7 +2482,7 @@ class NodeManager:
         return indirect_set_value_command

     @staticmethod
-    def
+    def handle_parameter_value_saving(
         parameter: Parameter,
         node: BaseNode,
         unique_parameter_uuid_to_values: dict[SerializedNodeCommands.UniqueParameterValueUUID, Any],

griptape_nodes/retained_mode/managers/os_manager.py
@@ -12,6 +12,7 @@ from typing import Any
 from binaryornot.check import is_binary
 from rich.console import Console

+from griptape_nodes.retained_mode.events.app_events import AppInitializationComplete
 from griptape_nodes.retained_mode.events.base_events import ResultDetails, ResultPayload
 from griptape_nodes.retained_mode.events.os_events import (
     CreateFileRequest,

@@ -31,8 +32,16 @@ from griptape_nodes.retained_mode.events.os_events import (
     RenameFileResultFailure,
     RenameFileResultSuccess,
 )
+from griptape_nodes.retained_mode.events.resource_events import (
+    CreateResourceInstanceRequest,
+    CreateResourceInstanceResultSuccess,
+    RegisterResourceTypeRequest,
+    RegisterResourceTypeResultSuccess,
+)
 from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes, logger
 from griptape_nodes.retained_mode.managers.event_manager import EventManager
+from griptape_nodes.retained_mode.managers.resource_types.cpu_resource import CPUResourceType
+from griptape_nodes.retained_mode.managers.resource_types.os_resource import OSResourceType

 console = Console()

@@ -74,6 +83,9 @@ class OSManager:
             request_type=RenameFileRequest, callback=self.on_rename_file_request
         )

+        # Register for app initialization event to setup system resources
+        event_manager.add_listener_to_app_event(AppInitializationComplete, self.on_app_initialization_complete)
+
     def _get_workspace_path(self) -> Path:
         """Get the workspace path from config."""
         return GriptapeNodes.ConfigManager().workspace_path

@@ -374,7 +386,7 @@ class OSManager:
             try:
                 stat = entry.stat()
                 # Get path relative to workspace if within workspace
-
+                _is_entry_in_workspace, entry_path = self._validate_workspace_path(entry)
                 mime_type = self._detect_mime_type(entry)
                 entries.append(
                     FileSystemEntry(

@@ -843,3 +855,108 @@ class OSManager:
             msg = f"Failed to rename {request.old_path} to {request.new_path}: {e}"
             logger.error(msg)
             return RenameFileResultFailure(result_details=msg)
+
+    def on_app_initialization_complete(self, _payload: AppInitializationComplete) -> None:
+        """Handle app initialization complete event by registering system resources."""
+        self._register_system_resources()
+
+    # NEW Resource Management Methods
+    def _register_system_resources(self) -> None:
+        """Register OS and CPU resource types with ResourceManager and create system instances."""
+        self._attempt_generate_os_resources()
+        self._attempt_generate_cpu_resources()
+
+    def _attempt_generate_os_resources(self) -> None:
+        """Register OS resource type and create system OS instance if successful."""
+        # Register OS resource type
+        os_resource_type = OSResourceType()
+        register_request = RegisterResourceTypeRequest(resource_type=os_resource_type)
+        result = GriptapeNodes.handle_request(register_request)
+
+        if not isinstance(result, RegisterResourceTypeResultSuccess):
+            logger.error("Attempted to register OS resource type. Failed due to resource type registration failure")
+            return
+
+        logger.debug("Successfully registered OS resource type")
+        # Registration successful, now create instance
+        self._create_system_os_instance()
+
+    def _attempt_generate_cpu_resources(self) -> None:
+        """Register CPU resource type and create system CPU instance if successful."""
+        # Register CPU resource type
+        cpu_resource_type = CPUResourceType()
+        register_request = RegisterResourceTypeRequest(resource_type=cpu_resource_type)
+        result = GriptapeNodes.handle_request(register_request)
+
+        if not isinstance(result, RegisterResourceTypeResultSuccess):
+            logger.error("Attempted to register CPU resource type. Failed due to resource type registration failure")
+            return
+
+        logger.debug("Successfully registered CPU resource type")
+        # Registration successful, now create instance
+        self._create_system_cpu_instance()
+
+    def _create_system_os_instance(self) -> None:
+        """Create system OS instance."""
+        os_capabilities = {
+            "platform": self._get_platform_name(),
+            "arch": self._get_architecture(),
+            "version": self._get_platform_version(),
+        }
+        create_request = CreateResourceInstanceRequest(
+            resource_type_name="OSResourceType", capabilities=os_capabilities
+        )
+        result = GriptapeNodes.handle_request(create_request)
+
+        if not isinstance(result, CreateResourceInstanceResultSuccess):
+            logger.error(
+                "Attempted to create system OS resource instance. Failed due to resource instance creation failure"
+            )
+            return
+
+        logger.debug("Successfully created system OS instance: %s", result.instance_id)
+
+    def _create_system_cpu_instance(self) -> None:
+        """Create system CPU instance."""
+        cpu_capabilities = {
+            "cores": os.cpu_count() or 1,
+            "architecture": self._get_architecture(),
+        }
+        create_request = CreateResourceInstanceRequest(
+            resource_type_name="CPUResourceType", capabilities=cpu_capabilities
+        )
+        result = GriptapeNodes.handle_request(create_request)
+
+        if not isinstance(result, CreateResourceInstanceResultSuccess):
+            logger.error(
+                "Attempted to create system CPU resource instance. Failed due to resource instance creation failure"
+            )
+            return
+
+        logger.debug("Successfully created system CPU instance: %s", result.instance_id)
+
+    def _get_platform_name(self) -> str:
+        """Get platform name using existing sys.platform detection."""
+        if self.is_windows():
+            return "windows"
+        if self.is_mac():
+            return "darwin"
+        if self.is_linux():
+            return "linux"
+        return sys.platform
+
+    def _get_architecture(self) -> str:
+        """Get system architecture."""
+        try:
+            return os.uname().machine.lower()
+        except AttributeError:
+            # Windows doesn't have os.uname(), fallback to environment variable
+            return os.environ.get("PROCESSOR_ARCHITECTURE", "unknown").lower()
+
+    def _get_platform_version(self) -> str:
+        """Get platform version."""
+        try:
+            return os.uname().release
+        except AttributeError:
+            # Windows doesn't have os.uname(), return basic platform info
+            return sys.platform

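The capability values attached to the system OS and CPU instances come from stdlib probes, with fallbacks because os.uname() exists only on POSIX platforms. A standalone sketch of those probes:

import os
import sys


def platform_name() -> str:
    """Coarse platform name comparable to the OS resource 'platform' capability."""
    if sys.platform.startswith("win"):
        return "windows"
    if sys.platform == "darwin":
        return "darwin"
    if sys.platform.startswith("linux"):
        return "linux"
    return sys.platform


def architecture() -> str:
    try:
        return os.uname().machine.lower()
    except AttributeError:
        # os.uname() is missing on Windows; fall back to the processor env var
        return os.environ.get("PROCESSOR_ARCHITECTURE", "unknown").lower()


def cpu_capabilities() -> dict:
    return {"cores": os.cpu_count() or 1, "architecture": architecture()}


print(platform_name(), cpu_capabilities())
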
griptape_nodes/retained_mode/managers/resource_components/__init__.py
@@ -0,0 +1 @@
+"""Resource components package."""

griptape_nodes/retained_mode/managers/resource_components/capability_field.py
@@ -0,0 +1,41 @@
+"""Capability field definition for resource schemas."""
+
+from dataclasses import dataclass
+from typing import Any
+
+
+@dataclass
+class CapabilityField:
+    """Definition of a single capability field."""
+
+    name: str
+    type_hint: type
+    description: str
+    required: bool = True
+    default: Any = None
+
+
+def validate_capabilities(schema: list[CapabilityField], capabilities: dict[str, Any]) -> list[str]:
+    """Validate capabilities against schema and return list of validation errors."""
+    errors = []
+
+    # Check required fields
+    for field in schema:
+        if field.required and field.name not in capabilities:
+            errors.append(f"Required field '{field.name}' is missing")  # noqa: PERF401
+
+    # Check field types (basic validation)
+    for field_name, value in capabilities.items():
+        schema_field = next((f for f in schema if f.name == field_name), None)
+        if schema_field:
+            # Handle numeric types (int/float are interchangeable)
+            if schema_field.type_hint in (int, float):
+                if not isinstance(value, (int, float)):
+                    errors.append(f"Field '{field_name}' should be numeric, got {type(value).__name__}")
+            # Standard type checking for all other types
+            elif not isinstance(value, schema_field.type_hint):
+                errors.append(
+                    f"Field '{field_name}' should be a {schema_field.type_hint.__name__}, got {type(value).__name__}"
+                )
+
+    return errors

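A usage sketch for the new validation helper; the schema and capability values below are made up for illustration, and the import path follows the module location shown in the file list:

from griptape_nodes.retained_mode.managers.resource_components.capability_field import (
    CapabilityField,
    validate_capabilities,
)

schema = [
    CapabilityField(name="cores", type_hint=int, description="CPU core count"),
    CapabilityField(name="architecture", type_hint=str, description="CPU architecture", required=False),
]

# A missing required field and a wrong type both surface as plain error strings
errors = validate_capabilities(schema, {"architecture": 64})
print(errors)
# Expected: ["Required field 'cores' is missing", "Field 'architecture' should be a str, got int"]
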
griptape_nodes/retained_mode/managers/resource_components/comparator.py
@@ -0,0 +1,18 @@
+from enum import StrEnum
+
+
+class Comparator(StrEnum):
+    """String-based comparators for resource requirement matching."""
+
+    EQUALS = "=="
+    NOT_EQUALS = "!="
+    GREATER_THAN = ">"
+    GREATER_THAN_OR_EQUAL = ">="
+    LESS_THAN = "<"
+    LESS_THAN_OR_EQUAL = "<="
+    STARTS_WITH = "startswith"
+    INCLUDES = "includes"  # substring match
+    NOT_PRESENT = "~"  # key should not exist
+    HAS_ANY = "has_any"  # container has any of the required items
+    HAS_ALL = "has_all"  # container has all of the required items
+    CUSTOM = "custom"  # allows ResourceType to implement custom comparison logic