griptape-nodes 0.53.0__py3-none-any.whl → 0.54.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- griptape_nodes/__init__.py +5 -2
- griptape_nodes/app/app.py +4 -26
- griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +35 -5
- griptape_nodes/bootstrap/workflow_executors/workflow_executor.py +15 -1
- griptape_nodes/cli/commands/config.py +4 -1
- griptape_nodes/cli/commands/init.py +5 -3
- griptape_nodes/cli/commands/libraries.py +14 -8
- griptape_nodes/cli/commands/models.py +504 -0
- griptape_nodes/cli/commands/self.py +5 -2
- griptape_nodes/cli/main.py +11 -1
- griptape_nodes/cli/shared.py +0 -9
- griptape_nodes/common/directed_graph.py +17 -1
- griptape_nodes/drivers/storage/base_storage_driver.py +40 -20
- griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py +24 -29
- griptape_nodes/drivers/storage/local_storage_driver.py +17 -13
- griptape_nodes/exe_types/node_types.py +219 -14
- griptape_nodes/exe_types/param_components/__init__.py +1 -0
- griptape_nodes/exe_types/param_components/execution_status_component.py +138 -0
- griptape_nodes/machines/control_flow.py +129 -92
- griptape_nodes/machines/dag_builder.py +207 -0
- griptape_nodes/machines/parallel_resolution.py +264 -276
- griptape_nodes/machines/sequential_resolution.py +9 -7
- griptape_nodes/node_library/library_registry.py +34 -1
- griptape_nodes/retained_mode/events/app_events.py +5 -1
- griptape_nodes/retained_mode/events/base_events.py +7 -7
- griptape_nodes/retained_mode/events/config_events.py +30 -0
- griptape_nodes/retained_mode/events/execution_events.py +2 -2
- griptape_nodes/retained_mode/events/model_events.py +296 -0
- griptape_nodes/retained_mode/griptape_nodes.py +10 -1
- griptape_nodes/retained_mode/managers/agent_manager.py +14 -0
- griptape_nodes/retained_mode/managers/config_manager.py +44 -3
- griptape_nodes/retained_mode/managers/event_manager.py +8 -2
- griptape_nodes/retained_mode/managers/flow_manager.py +45 -14
- griptape_nodes/retained_mode/managers/library_manager.py +3 -3
- griptape_nodes/retained_mode/managers/model_manager.py +1107 -0
- griptape_nodes/retained_mode/managers/node_manager.py +26 -26
- griptape_nodes/retained_mode/managers/object_manager.py +1 -1
- griptape_nodes/retained_mode/managers/os_manager.py +6 -6
- griptape_nodes/retained_mode/managers/settings.py +87 -9
- griptape_nodes/retained_mode/managers/static_files_manager.py +77 -9
- griptape_nodes/retained_mode/managers/sync_manager.py +10 -5
- griptape_nodes/retained_mode/managers/workflow_manager.py +98 -92
- griptape_nodes/retained_mode/retained_mode.py +19 -0
- griptape_nodes/servers/__init__.py +1 -0
- griptape_nodes/{mcp_server/server.py → servers/mcp.py} +1 -1
- griptape_nodes/{app/api.py → servers/static.py} +43 -40
- griptape_nodes/traits/button.py +124 -6
- griptape_nodes/traits/multi_options.py +188 -0
- griptape_nodes/traits/numbers_selector.py +77 -0
- griptape_nodes/traits/options.py +93 -2
- griptape_nodes/utils/async_utils.py +31 -0
- {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.0.dist-info}/METADATA +3 -1
- {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.0.dist-info}/RECORD +56 -47
- {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.0.dist-info}/WHEEL +1 -1
- /griptape_nodes/{mcp_server → servers}/ws_request_manager.py +0 -0
- {griptape_nodes-0.53.0.dist-info → griptape_nodes-0.54.0.dist-info}/entry_points.txt +0 -0
griptape_nodes/retained_mode/managers/workflow_manager.py

@@ -317,17 +317,22 @@ class WorkflowManager:

    def on_libraries_initialization_complete(self) -> None:
        # All of the libraries have loaded, and any workflows they came with have been registered.
-        #
+        # Discover workflows from both config and workspace.
        default_workflow_section = "app_events.on_app_initialization_complete.workflows_to_register"
+        config_mgr = GriptapeNodes.ConfigManager()

-
-
+        workflows_to_register = []
+
+        # Add from config
+        config_workflows = config_mgr.get_config_value(default_workflow_section, default=[])
+        workflows_to_register.extend(config_workflows)

-
-
+        # Add from workspace (avoiding duplicates)
+        workspace_path = config_mgr.workspace_path
+        workflows_to_register.extend([workspace_path])

-        if
-
+        # Register all discovered workflows at once if any were found
+        self._process_workflows_for_registration(workflows_to_register)

        # Print it all out nicely.
        self.print_workflow_load_status()

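For orientation, a minimal standalone sketch of the discovery flow this hunk introduces (function and variable names are illustrative, not the package's API): config-declared workflows and the workspace directory are combined into one list for a single registration pass, and duplicate registrations are skipped later by the registration code itself (see the "Skipping already registered workflow" log further down in this diff).

```python
from pathlib import Path

def collect_workflows_to_register(config_workflows: list[str], workspace_path: Path) -> list[str]:
    """Combine config-declared workflows with the workspace directory for one registration pass."""
    workflows_to_register: list[str] = []
    workflows_to_register.extend(config_workflows)      # entries from the config section
    workflows_to_register.append(str(workspace_path))   # the workspace directory gets scanned too
    return workflows_to_register

# Hypothetical inputs purely for illustration.
print(collect_workflows_to_register(["flows/hello.py"], Path.home() / "GriptapeNodes"))
```
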
@@ -340,7 +345,6 @@ class WorkflowManager:
            paths_to_remove.add(workflow_path.lower())

        if paths_to_remove:
-            config_mgr = GriptapeNodes.ConfigManager()
            workflows_to_register = config_mgr.get_config_value(default_workflow_section)
            if workflows_to_register:
                workflows_to_register = [

@@ -602,8 +606,8 @@ class WorkflowManager:
        if not execution_result.execution_successful:
            result_messages = []
            if context_warning:
-                result_messages.append(ResultDetail(message=context_warning, level=
-            result_messages.append(ResultDetail(message=execution_result.execution_details, level=
+                result_messages.append(ResultDetail(message=context_warning, level=logging.WARNING))
+            result_messages.append(ResultDetail(message=execution_result.execution_details, level=logging.ERROR))

            # Attempt to clear everything out, as we modified the engine state getting here.
            clear_all_request = ClearAllObjectStateRequest(i_know_what_im_doing=True)

@@ -615,8 +619,8 @@ class WorkflowManager:
        # Success!
        result_messages = []
        if context_warning:
-            result_messages.append(ResultDetail(message=context_warning, level=
-        result_messages.append(ResultDetail(message=execution_result.execution_details, level=
+            result_messages.append(ResultDetail(message=context_warning, level=logging.WARNING))
+        result_messages.append(ResultDetail(message=execution_result.execution_details, level=logging.DEBUG))
        return RunWorkflowFromRegistryResultSuccess(result_details=ResultDetails(*result_messages))

    def on_register_workflow_request(self, request: RegisterWorkflowRequest) -> ResultPayload:

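The `level=` arguments being filled in throughout these hunks are the standard library's integer logging levels, and `ResultDetails(*result_messages)` simply bundles the individual `ResultDetail` entries. A quick check of the constants involved:

```python
import logging

# Integer values carried by the new level= arguments.
print(logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR)  # 10 20 30 40
```
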
@@ -631,7 +635,8 @@ class WorkflowManager:
        return RegisterWorkflowResultSuccess(
            workflow_name=workflow.metadata.name,
            result_details=ResultDetails(
-                message=f"Successfully registered workflow: {workflow.metadata.name}",
+                message=f"Successfully registered workflow: {workflow.metadata.name}",
+                level=logging.DEBUG,
            ),
        )

@@ -671,7 +676,7 @@ class WorkflowManager:
        return ImportWorkflowResultSuccess(
            workflow_name=register_result.workflow_name,
            result_details=ResultDetails(
-                message=f"Successfully imported workflow: {register_result.workflow_name}", level=
+                message=f"Successfully imported workflow: {register_result.workflow_name}", level=logging.INFO
            ),
        )

@@ -705,7 +710,7 @@ class WorkflowManager:
            details = f"Failed to delete workflow file with path '{workflow.file_path}'. Exception: {e}"
            return DeleteWorkflowResultFailure(result_details=details)
        return DeleteWorkflowResultSuccess(
-            result_details=ResultDetails(message=f"Successfully deleted workflow: {request.name}", level=
+            result_details=ResultDetails(message=f"Successfully deleted workflow: {request.name}", level=logging.INFO)
        )

    def on_rename_workflow_request(self, request: RenameWorkflowRequest) -> ResultPayload:

@@ -722,7 +727,7 @@ class WorkflowManager:

        return RenameWorkflowResultSuccess(
            result_details=ResultDetails(
-                message=f"Successfully renamed workflow to: {request.requested_name}", level=
+                message=f"Successfully renamed workflow to: {request.requested_name}", level=logging.INFO
            )
        )

@@ -784,17 +789,17 @@ class WorkflowManager:
        except OSError as e:
            error_messages = []
            main_error = f"Failed to move workflow file '{current_file_path}' to '{new_absolute_path}': {e!s}"
-            error_messages.append(ResultDetail(message=main_error, level=
+            error_messages.append(ResultDetail(message=main_error, level=logging.ERROR))

            # Attempt to rollback if file was moved but registry update failed
            if new_absolute_path.exists() and not Path(current_file_path).exists():
                try:
                    new_absolute_path.rename(current_file_path)
                    rollback_message = f"Rolled back file move for workflow '{request.workflow_name}'"
-                    error_messages.append(ResultDetail(message=rollback_message, level=
+                    error_messages.append(ResultDetail(message=rollback_message, level=logging.INFO))
                except OSError:
                    rollback_failure = f"Failed to rollback file move for workflow '{request.workflow_name}'"
-                    error_messages.append(ResultDetail(message=rollback_failure, level=
+                    error_messages.append(ResultDetail(message=rollback_failure, level=logging.ERROR))

            return MoveWorkflowResultFailure(result_details=ResultDetails(*error_messages))
        except Exception as e:

@@ -803,7 +808,7 @@ class WorkflowManager:
        else:
            details = f"Successfully moved workflow '{request.workflow_name}' to '{new_relative_path}'"
            return MoveWorkflowResultSuccess(
-                moved_file_path=new_relative_path, result_details=ResultDetails(message=details, level=
+                moved_file_path=new_relative_path, result_details=ResultDetails(message=details, level=logging.INFO)
            )

    def on_load_workflow_metadata_request(  # noqa: C901, PLR0912, PLR0915

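These hunks only fill in logging levels; the surrounding move-then-rollback behavior is unchanged. As a generic, standalone sketch of that pattern (paths and the `update_registry` callback are hypothetical stand-ins for the manager's real bookkeeping):

```python
from pathlib import Path
from typing import Callable

def move_with_rollback(src: Path, dst: Path, update_registry: Callable[[Path], None]) -> None:
    """Move a file, then roll the move back if the follow-up bookkeeping fails."""
    dst.parent.mkdir(parents=True, exist_ok=True)
    src.rename(dst)
    try:
        update_registry(dst)
    except Exception:
        # Only rename back if the file actually moved and the source slot is free.
        if dst.exists() and not src.exists():
            dst.rename(src)
        raise
```
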
@@ -1055,21 +1060,7 @@ class WorkflowManager:
        self.register_list_of_workflows(workflows_to_register)

    def register_list_of_workflows(self, workflows_to_register: list[str]) -> None:
-
-            path = Path(workflow_to_register)
-
-            if path.is_dir():
-                # If it's a directory, register all the workflows in it.
-                for workflow_file in path.glob("*.py"):
-                    # Check that the python file has script metadata
-                    metadata_blocks = self.get_workflow_metadata(
-                        workflow_file, block_name=WorkflowManager.WORKFLOW_METADATA_HEADER
-                    )
-                    if len(metadata_blocks) == 1:
-                        self._register_workflow(str(workflow_file))
-            else:
-                # If it's a file, register it directly.
-                self._register_workflow(str(path))
+        self._process_workflows_for_registration(workflows_to_register)

    def _register_workflow(self, workflow_to_register: str) -> bool:
        """Registers a workflow from a file.

@@ -1467,7 +1458,7 @@ class WorkflowManager:
            existing_workflow.metadata = workflow_metadata
        details = f"Successfully saved workflow to: {file_path}"
        return SaveWorkflowResultSuccess(
-            file_path=str(file_path), result_details=ResultDetails(message=details, level=
+            file_path=str(file_path), result_details=ResultDetails(message=details, level=logging.INFO)
        )

    def _generate_workflow_metadata(  # noqa: PLR0913

@@ -1615,7 +1606,7 @@ class WorkflowManager:
            ),
        )

-        # Create conditional logic: workflow_executor = workflow_executor or LocalWorkflowExecutor()
+        # Create conditional logic: workflow_executor = workflow_executor or LocalWorkflowExecutor(storage_backend=storage_backend_enum)
        executor_assign = ast.Assign(
            targets=[ast.Name(id="workflow_executor", ctx=ast.Store())],
            value=ast.BoolOp(

@@ -1625,31 +1616,45 @@ class WorkflowManager:
                    ast.Call(
                        func=ast.Name(id="LocalWorkflowExecutor", ctx=ast.Load()),
                        args=[],
-                        keywords=[
+                        keywords=[
+                            ast.keyword(
+                                arg="storage_backend", value=ast.Name(id="storage_backend_enum", ctx=ast.Load())
+                            ),
+                        ],
                    ),
                ],
            ),
        )
-
-
-
-
-
-
-                            ctx=ast.Load(),
-                        ),
-                        args=[],
-                        keywords=[
-                            ast.keyword(arg="workflow_name", value=ast.Constant(flow_name)),
-                            ast.keyword(arg="flow_input", value=ast.Name(id="input", ctx=ast.Load())),
-                            ast.keyword(arg="storage_backend", value=ast.Name(id="storage_backend_enum", ctx=ast.Load())),
-                        ],
+
+        # Use async context manager for workflow execution
+        with_stmt = ast.AsyncWith(
+            items=[
+                ast.withitem(
+                    context_expr=ast.Name(id="workflow_executor", ctx=ast.Load()),
+                    optional_vars=ast.Name(id="executor", ctx=ast.Store()),
                )
-
+            ],
+            body=[
+                ast.Expr(
+                    value=ast.Await(
+                        value=ast.Call(
+                            func=ast.Attribute(
+                                value=ast.Name(id="executor", ctx=ast.Load()),
+                                attr="arun",
+                                ctx=ast.Load(),
+                            ),
+                            args=[],
+                            keywords=[
+                                ast.keyword(arg="workflow_name", value=ast.Constant(flow_name)),
+                                ast.keyword(arg="flow_input", value=ast.Name(id="input", ctx=ast.Load())),
+                            ],
+                        )
+                    )
+                )
+            ],
        )
        return_stmt = ast.Return(
            value=ast.Attribute(
-                value=ast.Name(id="
+                value=ast.Name(id="executor", ctx=ast.Load()),
                attr="output",
                ctx=ast.Load(),
            )

@@ -1659,7 +1664,7 @@ class WorkflowManager:
        async_func_def = ast.AsyncFunctionDef(
            name="aexecute_workflow",
            args=args,
-            body=[ensure_context_call, storage_backend_convert, executor_assign,
+            body=[ensure_context_call, storage_backend_convert, executor_assign, with_stmt, return_stmt],
            decorator_list=[],
            returns=return_annotation,
            type_params=[],

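To see what these AST nodes generate, the snippet below rebuilds just the `AsyncWith`/`Await` portion from the hunk above and prints it with `ast.unparse`. The `flow_name` value is a placeholder; the node structure mirrors what the diff constructs.

```python
import ast

flow_name = "example_flow"  # placeholder for the real workflow name

with_stmt = ast.AsyncWith(
    items=[
        ast.withitem(
            context_expr=ast.Name(id="workflow_executor", ctx=ast.Load()),
            optional_vars=ast.Name(id="executor", ctx=ast.Store()),
        )
    ],
    body=[
        ast.Expr(
            value=ast.Await(
                value=ast.Call(
                    func=ast.Attribute(
                        value=ast.Name(id="executor", ctx=ast.Load()),
                        attr="arun",
                        ctx=ast.Load(),
                    ),
                    args=[],
                    keywords=[
                        ast.keyword(arg="workflow_name", value=ast.Constant(flow_name)),
                        ast.keyword(arg="flow_input", value=ast.Name(id="input", ctx=ast.Load())),
                    ],
                )
            )
        )
    ],
)

module = ast.fix_missing_locations(ast.Module(body=[with_stmt], type_ignores=[]))
print(ast.unparse(module))
# Prints roughly:
#   async with workflow_executor as executor:
#       await executor.arun(workflow_name='example_flow', flow_input=input)
```
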
@@ -3196,15 +3201,15 @@ class WorkflowManager:
            result_messages = []
            if isinstance(register_workflow_result, RegisterWorkflowResultSuccess):
                success_message = f"Successfully registered new workflow with file '{workflow_file.name}'."
-                result_messages.append(ResultDetail(message=success_message, level=
+                result_messages.append(ResultDetail(message=success_message, level=logging.INFO))
            else:
                failure_message = f"Failed to register workflow with file '{workflow_file.name}': {cast('RegisterWorkflowResultFailure', register_workflow_result).exception}"
-                result_messages.append(ResultDetail(message=failure_message, level=
+                result_messages.append(ResultDetail(message=failure_message, level=logging.WARNING))
        else:
            metadata_failure_message = (
                f"Failed to load metadata for workflow file '{workflow_file.name}'. Not registering workflow."
            )
-            result_messages = [ResultDetail(message=metadata_failure_message, level=
+            result_messages = [ResultDetail(message=metadata_failure_message, level=logging.WARNING)]

        # Log all messages through consolidated ResultDetails
        ResultDetails(*result_messages)

@@ -3402,7 +3407,7 @@ class WorkflowManager:
            return BranchWorkflowResultSuccess(
                branched_workflow_name=branch_name,
                original_workflow_name=request.workflow_name,
-                result_details=ResultDetails(message=details, level=
+                result_details=ResultDetails(message=details, level=logging.INFO),
            )

        except Exception as e:

@@ -3476,16 +3481,16 @@ class WorkflowManager:
                WorkflowRegistry.delete_workflow_by_name(request.workflow_name)
                Path(branch_content_file_path).unlink()
                cleanup_message = f"Deleted branch workflow file and registry entry for '{request.workflow_name}'"
-                result_messages.append(ResultDetail(message=cleanup_message, level=
+                result_messages.append(ResultDetail(message=cleanup_message, level=logging.INFO))
            except Exception as delete_error:
                warning_message = (
                    f"Failed to fully clean up branch workflow '{request.workflow_name}': {delete_error!s}"
                )
-                result_messages.append(ResultDetail(message=warning_message, level=
+                result_messages.append(ResultDetail(message=warning_message, level=logging.WARNING))
                # Continue anyway - the merge was successful even if cleanup failed

            success_message = f"Successfully merged branch workflow '{request.workflow_name}' into source workflow '{source_workflow_name}'"
-            result_messages.append(ResultDetail(message=success_message, level=
+            result_messages.append(ResultDetail(message=success_message, level=logging.INFO))

            return MergeWorkflowBranchResultSuccess(
                merged_workflow_name=source_workflow_name, result_details=ResultDetails(*result_messages)

@@ -3559,7 +3564,8 @@ class WorkflowManager:
        else:
            details = f"Successfully reset branch workflow '{request.workflow_name}' to match source workflow '{source_workflow_name}'"
            return ResetWorkflowBranchResultSuccess(
-                reset_workflow_name=request.workflow_name,
+                reset_workflow_name=request.workflow_name,
+                result_details=ResultDetails(message=details, level=logging.INFO),
            )

    def on_compare_workflows_request(self, request: CompareWorkflowsRequest) -> ResultPayload:

@@ -3807,7 +3813,7 @@ class WorkflowManager:
            failed_workflows=failed,
            result_details=ResultDetails(
                message=f"Successfully processed workflows: {len(succeeded)} succeeded, {len(failed)} failed.",
-                level=
+                level=logging.INFO,
            ),
        )

@@ -3820,33 +3826,9 @@ class WorkflowManager:
        succeeded = []
        failed = []

-
-
-
-            if path.is_dir():
-                dir_result = self._process_workflow_directory(path)
-                succeeded.extend(dir_result.succeeded)
-                failed.extend(dir_result.failed)
-            elif path.suffix == ".py":
-                workflow_name = self._process_single_workflow_file(path)
-                if workflow_name:
-                    succeeded.append(workflow_name)
-                else:
-                    failed.append(str(path))
-
-        return WorkflowRegistrationResult(succeeded=succeeded, failed=failed)
-
-    def _process_workflow_directory(self, directory_path: Path) -> WorkflowRegistrationResult:
-        """Process all workflow files in a directory.
-
-        Returns:
-            WorkflowRegistrationResult with succeeded and failed workflow names
-        """
-        succeeded = []
-        failed = []
-
-        for workflow_file in directory_path.glob("*.py"):
-            # Check that the python file has script metadata
+        def process_workflow_file(workflow_file: Path) -> None:
+            """Process a single workflow file for registration."""
+            # Check if the file has workflow metadata before processing
            metadata_blocks = self.get_workflow_metadata(
                workflow_file, block_name=WorkflowManager.WORKFLOW_METADATA_HEADER
            )

@@ -3857,6 +3839,18 @@ class WorkflowManager:
            else:
                failed.append(str(workflow_file))

+        def process_path(path: Path) -> None:
+            """Process a path, handling both files and directories."""
+            if path.is_dir():
+                # Process all Python files recursively in the directory
+                for workflow_file in path.rglob("*.py"):
+                    process_workflow_file(workflow_file)
+            elif path.suffix == ".py":
+                process_workflow_file(path)
+
+        for workflow_to_register in workflows_to_register:
+            process_path(Path(workflow_to_register))
+
        return WorkflowRegistrationResult(succeeded=succeeded, failed=failed)

    def _process_single_workflow_file(self, workflow_file: Path) -> str | None:

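The switch from `glob("*.py")` to `rglob("*.py")` in `process_path` means workflows in nested workspace subdirectories are now discovered as well. A small illustration with a throwaway directory (the layout is made up):

```python
import tempfile
from pathlib import Path

root = Path(tempfile.mkdtemp())
(root / "flow_a.py").touch()
(root / "nested").mkdir()
(root / "nested" / "flow_b.py").touch()

print(sorted(p.name for p in root.glob("*.py")))   # ['flow_a.py']
print(sorted(p.name for p in root.rglob("*.py")))  # ['flow_a.py', 'flow_b.py']
```
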
@@ -3865,6 +3859,8 @@ class WorkflowManager:
        Returns:
            Workflow name if registered successfully, None if failed or skipped
        """
+        from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
+
        # Parse metadata once and use it for both registration check and actual registration
        load_metadata_request = LoadWorkflowMetadata(file_name=str(workflow_file))
        load_metadata_result = self.on_load_workflow_metadata_request(load_metadata_request)

@@ -3880,10 +3876,20 @@ class WorkflowManager:
            logger.debug("Skipping already registered workflow: %s", workflow_file)
            return None

+        # Convert to relative path if the workflow is under workspace_path
+        config_mgr = GriptapeNodes.ConfigManager()
+        workspace_path = config_mgr.workspace_path
+
+        if workflow_file.is_relative_to(workspace_path):
+            relative_path = workflow_file.relative_to(workspace_path)
+            file_path_to_register = str(relative_path)
+        else:
+            file_path_to_register = str(workflow_file)
+
        # Register workflow using existing method with parsed metadata available
        # The _register_workflow method will re-parse metadata, but this is acceptable
        # since we've already validated it's parseable and the duplicate work is minimal
-        if self._register_workflow(
+        if self._register_workflow(file_path_to_register):
            return workflow_metadata.name
        return None

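`Path.is_relative_to` / `relative_to` (Python 3.9+) are what turn a workflow path under the workspace into a workspace-relative registration path while leaving outside paths untouched. With hypothetical paths:

```python
from pathlib import Path

workspace = Path("/home/user/GriptapeNodes")
inside = workspace / "flows" / "hello.py"
outside = Path("/tmp/other/flow.py")

print(inside.is_relative_to(workspace))   # True
print(inside.relative_to(workspace))      # flows/hello.py
print(outside.is_relative_to(workspace))  # False
```
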
griptape_nodes/retained_mode/retained_mode.py

@@ -10,6 +10,7 @@ from griptape_nodes.retained_mode.events.base_events import (
 )
 from griptape_nodes.retained_mode.events.config_events import (
     GetConfigCategoryRequest,
+    GetConfigSchemaRequest,
     GetConfigValueRequest,
     SetConfigCategoryRequest,
     SetConfigValueRequest,

@@ -1396,6 +1397,24 @@ class RetainedMode:
        result = GriptapeNodes().handle_request(request)
        return result

+    @classmethod
+    def get_config_schema(cls) -> ResultPayload:
+        """Gets the JSON schema for the configuration model.
+
+        Returns:
+            ResultPayload: Contains the configuration schema with field types, enums, and validation rules.
+
+        Example:
+            # Get the configuration schema
+            schema_result = cmd.get_config_schema()
+            if isinstance(schema_result, GetConfigSchemaResultSuccess):
+                schema = schema_result.schema
+                # Use schema to render appropriate UI components
+        """
+        request = GetConfigSchemaRequest()
+        result = GriptapeNodes().handle_request(request)
+        return result
+
    @classmethod
    def rename(cls, object_name: str, requested_name: str) -> ResultPayload:
        """Renames a node or flow.

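The new `GetConfigSchemaRequest` surfaces the JSON schema of the configuration model (the related settings changes are in `griptape_nodes/retained_mode/managers/settings.py`, +87 -9 in the file list above). As a hedged illustration of what such a schema looks like, here is a toy pydantic v2 model, not the package's real settings class:

```python
import json

from pydantic import BaseModel

class ToySettings(BaseModel):
    workspace_directory: str = "."
    log_level: str = "INFO"

# Field names, types, and defaults come back as standard JSON Schema,
# which a UI can use to render the right input controls.
print(json.dumps(ToySettings.model_json_schema(), indent=2))
```
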
griptape_nodes/servers/__init__.py

@@ -0,0 +1 @@
+"""Package for web servers the engine may need to start."""

griptape_nodes/{mcp_server/server.py → servers/mcp.py}

@@ -16,7 +16,6 @@ from pydantic import TypeAdapter
 from rich.logging import RichHandler
 from starlette.types import Receive, Scope, Send
 
-from griptape_nodes.mcp_server.ws_request_manager import AsyncRequestManager, WebSocketConnectionManager
 from griptape_nodes.retained_mode.events.base_events import RequestPayload
 from griptape_nodes.retained_mode.events.connection_events import (
     CreateConnectionRequest,

@@ -38,6 +37,7 @@ from griptape_nodes.retained_mode.events.parameter_events import (
 )
 from griptape_nodes.retained_mode.managers.config_manager import ConfigManager
 from griptape_nodes.retained_mode.managers.secrets_manager import SecretsManager
+from griptape_nodes.servers.ws_request_manager import AsyncRequestManager, WebSocketConnectionManager
 
 SUPPORTED_REQUEST_EVENTS: dict[str, type[RequestPayload]] = {
     # Nodes

griptape_nodes/{app/api.py → servers/static.py}

@@ -4,15 +4,16 @@ import binascii
 import logging
 import os
 from pathlib import Path
-from typing import Annotated
 from urllib.parse import urljoin
 
 import uvicorn
-from fastapi import
+from fastapi import FastAPI, HTTPException, Request
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles
 from rich.logging import RichHandler
 
+from griptape_nodes.retained_mode.griptape_nodes import GriptapeNodes
+
 # Whether to enable the static server
 STATIC_SERVER_ENABLED = os.getenv("STATIC_SERVER_ENABLED", "true").lower() == "true"
 # Host of the static server

@@ -20,7 +21,7 @@ STATIC_SERVER_HOST = os.getenv("STATIC_SERVER_HOST", "localhost")
 # Port of the static server
 STATIC_SERVER_PORT = int(os.getenv("STATIC_SERVER_PORT", "8124"))
 # URL path for the static server
-STATIC_SERVER_URL = os.getenv("STATIC_SERVER_URL", "/
+STATIC_SERVER_URL = os.getenv("STATIC_SERVER_URL", "/workspace")
 # Log level for the static server
 STATIC_SERVER_LOG_LEVEL = os.getenv("STATIC_SERVER_LOG_LEVEL", "ERROR").lower()
 

@@ -28,18 +29,6 @@ logger = logging.getLogger("griptape_nodes_api")
 logging.getLogger("uvicorn").addHandler(RichHandler(show_time=True, show_path=False, markup=True, rich_tracebacks=True))
 
 
-# Global static directory - initialized as None and set when starting the API
-static_dir: Path | None = None
-
-
-def get_static_dir() -> Path:
-    """FastAPI dependency to get the static directory."""
-    if static_dir is None:
-        msg = "Static directory is not initialized"
-        raise HTTPException(status_code=500, detail=msg)
-    return static_dir
-
-
 """Create and configure the FastAPI application."""
 app = FastAPI()
 

@@ -52,35 +41,34 @@ async def _create_static_file_upload_url(request: Request) -> dict:
     """
     base_url = request.base_url
     body = await request.json()
-
-    url = urljoin(str(base_url), f"/static-uploads/{
+    file_path = body["file_path"].lstrip("/")
+    url = urljoin(str(base_url), f"/static-uploads/{file_path}")
 
     return {"url": url}
 
 
 @app.put("/static-uploads/{file_path:path}")
-async def _create_static_file(
-    request: Request, file_path: str, static_directory: Annotated[Path, Depends(get_static_dir)]
-) -> dict:
+async def _create_static_file(request: Request, file_path: str) -> dict:
     """Upload a static file to the static server."""
     if not STATIC_SERVER_ENABLED:
         msg = "Static server is not enabled. Please set STATIC_SERVER_ENABLED to True."
         raise ValueError(msg)
 
-
+    workspace_directory = Path(GriptapeNodes.ConfigManager().get_config_value("workspace_directory"))
+    full_file_path = workspace_directory / file_path
 
     # Create parent directories if they don't exist
-
+    full_file_path.parent.mkdir(parents=True, exist_ok=True)
 
     data = await request.body()
     try:
-
+        full_file_path.write_bytes(data)
     except binascii.Error as e:
         msg = f"Invalid base64 encoding for file {file_path}."
         logger.error(msg)
         raise HTTPException(status_code=400, detail=msg) from e
     except (OSError, PermissionError) as e:
-        msg = f"Failed to write file {
+        msg = f"Failed to write file {full_file_path}: {e}"
         logger.error(msg)
         raise HTTPException(status_code=500, detail=msg) from e
 

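A hypothetical client-side call (not part of the package) showing how the reworked `PUT /static-uploads/{file_path}` endpoint could be exercised; the host and port are the `STATIC_SERVER_HOST`/`STATIC_SERVER_PORT` defaults from this file, and the upload path is made up:

```python
import urllib.request

req = urllib.request.Request(
    "http://localhost:8124/static-uploads/outputs/hello.txt",
    data=b"hello world",
    method="PUT",
)
with urllib.request.urlopen(req) as resp:
    # The server writes the bytes under the workspace directory and returns the static URL.
    print(resp.read().decode())
```
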
@@ -88,19 +76,28 @@ async def _create_static_file(
     return {"url": static_url}
 
 
+@app.get("/static-uploads/{file_path_prefix:path}")
 @app.get("/static-uploads/")
-async def _list_static_files(
-    """List
+async def _list_static_files(file_path_prefix: str = "") -> dict:
+    """List static files in the static server under the specified path prefix."""
     if not STATIC_SERVER_ENABLED:
         msg = "Static server is not enabled. Please set STATIC_SERVER_ENABLED to True."
         raise HTTPException(status_code=500, detail=msg)
 
+    workspace_directory = Path(GriptapeNodes.ConfigManager().get_config_value("workspace_directory"))
+
+    # Handle the prefix path
+    if file_path_prefix:
+        target_directory = workspace_directory / file_path_prefix
+    else:
+        target_directory = workspace_directory
+
     try:
         file_names = []
-        if
-        for file_path in
+        if target_directory.exists() and target_directory.is_dir():
+            for file_path in target_directory.rglob("*"):
                 if file_path.is_file():
-                    relative_path = file_path.relative_to(
+                    relative_path = file_path.relative_to(workspace_directory)
                     file_names.append(str(relative_path))
     except (OSError, PermissionError) as e:
         msg = f"Failed to list files in static directory: {e}"

@@ -111,13 +108,14 @@ async def _list_static_files(static_directory: Annotated[Path, Depends(get_stati
 
 
 @app.delete("/static-files/{file_path:path}")
-async def _delete_static_file(file_path: str
+async def _delete_static_file(file_path: str) -> dict:
     """Delete a static file from the static server."""
     if not STATIC_SERVER_ENABLED:
         msg = "Static server is not enabled. Please set STATIC_SERVER_ENABLED to True."
         raise HTTPException(status_code=500, detail=msg)
 
-
+    workspace_directory = Path(GriptapeNodes.ConfigManager().get_config_value("workspace_directory"))
+    file_full_path = workspace_directory / file_path
 
     # Check if file exists
     if not file_full_path.exists():

@@ -141,13 +139,10 @@ async def _delete_static_file(file_path: str, static_directory: Annotated[Path,
     return {"message": f"File {file_path} deleted successfully"}
 
 
-def _setup_app(
+def _setup_app() -> None:
     """Setup FastAPI app with middleware and static files."""
-
-
-
-    if not static_dir.exists():
-        static_dir.mkdir(parents=True, exist_ok=True)
+    workspace_directory = Path(GriptapeNodes.ConfigManager().get_config_value("workspace_directory"))
+    static_files_directory = Path(GriptapeNodes.ConfigManager().get_config_value("static_files_directory"))
 
     app.add_middleware(
         CORSMiddleware,

@@ -163,15 +158,23 @@ def _setup_app(static_directory: Path) -> None:
 
     app.mount(
         STATIC_SERVER_URL,
-        StaticFiles(directory=
+        StaticFiles(directory=workspace_directory),
+        name="workspace",
+    )
+    static_files_path = workspace_directory / static_files_directory
+    static_files_path.mkdir(parents=True, exist_ok=True)
+    # For legacy urls
+    app.mount(
+        "/static",
+        StaticFiles(directory=workspace_directory / static_files_directory),
         name="static",
     )
 
 
-def start_static_server(
+def start_static_server() -> None:
     """Run uvicorn server synchronously using uvicorn.run."""
     # Setup the FastAPI app
-    _setup_app(
+    _setup_app()
 
     try:
         # Run server using uvicorn.run