griptape-nodes 0.65.6__py3-none-any.whl → 0.66.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. griptape_nodes/common/node_executor.py +352 -27
  2. griptape_nodes/drivers/storage/base_storage_driver.py +12 -3
  3. griptape_nodes/drivers/storage/griptape_cloud_storage_driver.py +18 -2
  4. griptape_nodes/drivers/storage/local_storage_driver.py +42 -5
  5. griptape_nodes/exe_types/base_iterative_nodes.py +0 -1
  6. griptape_nodes/exe_types/connections.py +42 -0
  7. griptape_nodes/exe_types/core_types.py +2 -2
  8. griptape_nodes/exe_types/node_groups/__init__.py +2 -1
  9. griptape_nodes/exe_types/node_groups/base_iterative_node_group.py +177 -0
  10. griptape_nodes/exe_types/node_groups/base_node_group.py +1 -0
  11. griptape_nodes/exe_types/node_groups/subflow_node_group.py +35 -2
  12. griptape_nodes/exe_types/param_types/parameter_audio.py +1 -1
  13. griptape_nodes/exe_types/param_types/parameter_bool.py +1 -1
  14. griptape_nodes/exe_types/param_types/parameter_button.py +1 -1
  15. griptape_nodes/exe_types/param_types/parameter_float.py +1 -1
  16. griptape_nodes/exe_types/param_types/parameter_image.py +1 -1
  17. griptape_nodes/exe_types/param_types/parameter_int.py +1 -1
  18. griptape_nodes/exe_types/param_types/parameter_number.py +1 -1
  19. griptape_nodes/exe_types/param_types/parameter_string.py +1 -1
  20. griptape_nodes/exe_types/param_types/parameter_three_d.py +1 -1
  21. griptape_nodes/exe_types/param_types/parameter_video.py +1 -1
  22. griptape_nodes/machines/control_flow.py +5 -4
  23. griptape_nodes/machines/dag_builder.py +121 -55
  24. griptape_nodes/machines/fsm.py +10 -0
  25. griptape_nodes/machines/parallel_resolution.py +39 -38
  26. griptape_nodes/machines/sequential_resolution.py +29 -3
  27. griptape_nodes/node_library/library_registry.py +41 -2
  28. griptape_nodes/retained_mode/events/library_events.py +147 -8
  29. griptape_nodes/retained_mode/events/os_events.py +12 -4
  30. griptape_nodes/retained_mode/managers/fitness_problems/libraries/__init__.py +2 -0
  31. griptape_nodes/retained_mode/managers/fitness_problems/libraries/incompatible_requirements_problem.py +34 -0
  32. griptape_nodes/retained_mode/managers/flow_manager.py +133 -20
  33. griptape_nodes/retained_mode/managers/library_manager.py +1324 -564
  34. griptape_nodes/retained_mode/managers/node_manager.py +9 -3
  35. griptape_nodes/retained_mode/managers/os_manager.py +429 -65
  36. griptape_nodes/retained_mode/managers/resource_types/compute_resource.py +82 -0
  37. griptape_nodes/retained_mode/managers/resource_types/os_resource.py +17 -0
  38. griptape_nodes/retained_mode/managers/static_files_manager.py +21 -8
  39. griptape_nodes/retained_mode/managers/version_compatibility_manager.py +3 -3
  40. griptape_nodes/utils/git_utils.py +2 -17
  41. griptape_nodes/version_compatibility/versions/v0_39_0/modified_parameters_set_removal.py +5 -5
  42. griptape_nodes/version_compatibility/versions/v0_65_4/__init__.py +5 -0
  43. griptape_nodes/version_compatibility/versions/v0_65_4/run_in_parallel_to_run_in_order.py +79 -0
  44. griptape_nodes/version_compatibility/versions/v0_65_5/__init__.py +5 -0
  45. griptape_nodes/version_compatibility/versions/v0_65_5/flux_2_removed_parameters.py +85 -0
  46. {griptape_nodes-0.65.6.dist-info → griptape_nodes-0.66.0.dist-info}/METADATA +1 -1
  47. {griptape_nodes-0.65.6.dist-info → griptape_nodes-0.66.0.dist-info}/RECORD +49 -54
  48. griptape_nodes/retained_mode/managers/library_lifecycle/__init__.py +0 -45
  49. griptape_nodes/retained_mode/managers/library_lifecycle/data_models.py +0 -191
  50. griptape_nodes/retained_mode/managers/library_lifecycle/library_directory.py +0 -346
  51. griptape_nodes/retained_mode/managers/library_lifecycle/library_fsm.py +0 -439
  52. griptape_nodes/retained_mode/managers/library_lifecycle/library_provenance/__init__.py +0 -17
  53. griptape_nodes/retained_mode/managers/library_lifecycle/library_provenance/base.py +0 -82
  54. griptape_nodes/retained_mode/managers/library_lifecycle/library_provenance/github.py +0 -116
  55. griptape_nodes/retained_mode/managers/library_lifecycle/library_provenance/local_file.py +0 -367
  56. griptape_nodes/retained_mode/managers/library_lifecycle/library_provenance/package.py +0 -104
  57. griptape_nodes/retained_mode/managers/library_lifecycle/library_provenance/sandbox.py +0 -155
  58. griptape_nodes/retained_mode/managers/library_lifecycle/library_provenance.py +0 -18
  59. griptape_nodes/retained_mode/managers/library_lifecycle/library_status.py +0 -12
  60. {griptape_nodes-0.65.6.dist-info → griptape_nodes-0.66.0.dist-info}/WHEEL +0 -0
  61. {griptape_nodes-0.65.6.dist-info → griptape_nodes-0.66.0.dist-info}/entry_points.txt +0 -0
griptape_nodes/retained_mode/events/library_events.py

@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from dataclasses import dataclass
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, NamedTuple
 
 from griptape_nodes.retained_mode.events.base_events import (
     RequestPayload,
@@ -13,9 +13,23 @@ from griptape_nodes.retained_mode.events.base_events import (
 from griptape_nodes.retained_mode.events.payload_registry import PayloadRegistry
 
 if TYPE_CHECKING:
+    from pathlib import Path
+
     from griptape_nodes.node_library.library_registry import LibraryMetadata, LibrarySchema, NodeMetadata
     from griptape_nodes.retained_mode.managers.fitness_problems.libraries import LibraryProblem
-    from griptape_nodes.retained_mode.managers.library_lifecycle.library_status import LibraryStatus
+    from griptape_nodes.retained_mode.managers.library_manager import LibraryManager
+
+
+class DiscoveredLibrary(NamedTuple):
+    """Information about a discovered library.
+
+    Attributes:
+        path: Absolute path to the library JSON file or sandbox directory
+        is_sandbox: True if this is a sandbox library (user-created nodes in workspace), False for regular libraries
+    """
+
+    path: Path
+    is_sandbox: bool
 
 
 @dataclass
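
Because DiscoveredLibrary is a NamedTuple, instances are immutable and hashable, which is what lets the DiscoverLibrariesResultSuccess payload later in this diff carry them in a set and deduplicate repeat discoveries of the same path. A minimal self-contained sketch of that behavior (the paths below are made up for illustration, not taken from the package):

from pathlib import Path
from typing import NamedTuple


class DiscoveredLibrary(NamedTuple):
    path: Path
    is_sandbox: bool


# Discovering the same library twice collapses to one entry in a set,
# because NamedTuple equality and hashing are value-based.
found = {
    DiscoveredLibrary(Path("/libs/griptape_nodes_library.json"), is_sandbox=False),
    DiscoveredLibrary(Path("/libs/griptape_nodes_library.json"), is_sandbox=False),
    DiscoveredLibrary(Path("/workspace/sandbox"), is_sandbox=True),
}
assert len(found) == 2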
@@ -204,7 +218,7 @@ class LoadLibraryMetadataFromFileResultFailure(WorkflowNotAlteredMixin, ResultPa
         library_path: Path to the library file that failed to load.
         library_name: Name of the library if it could be extracted from the JSON,
             None if the name couldn't be determined.
-        status: The LibraryStatus enum indicating the type of failure
+        status: The LibraryFitness enum indicating the type of failure
             (MISSING, UNUSABLE, etc.).
         problems: List of specific problems encountered during loading
             (file not found, JSON parse errors, validation failures, etc.).
@@ -212,7 +226,7 @@ class LoadLibraryMetadataFromFileResultFailure(WorkflowNotAlteredMixin, ResultPa
 
     library_path: str
     library_name: str | None
-    status: LibraryStatus
+    status: LibraryManager.LibraryFitness
    problems: list[LibraryProblem]
 
 
@@ -260,22 +274,74 @@ class LoadMetadataForAllLibrariesResultFailure(WorkflowNotAlteredMixin, ResultPa
     """
 
 
+@dataclass
+@PayloadRegistry.register
+class ScanSandboxDirectoryRequest(RequestPayload):
+    """Scan sandbox directory and generate/update library metadata.
+
+    This request triggers a scan of a sandbox directory,
+    discovers Python files, and either creates a new library schema or
+    merges with an existing griptape_nodes_library.json if present.
+
+    Use when: Manually triggering sandbox refresh, testing sandbox setup,
+    forcing regeneration of sandbox library metadata.
+
+    Args:
+        directory_path: Path to sandbox directory to scan (required).
+
+    Results: ScanSandboxDirectoryResultSuccess | ScanSandboxDirectoryResultFailure
+    """
+
+    directory_path: str
+
+
+@dataclass
+@PayloadRegistry.register
+class ScanSandboxDirectoryResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
+    """Sandbox directory scanned successfully.
+
+    Args:
+        library_schema: The generated or merged LibrarySchema
+    """
+
+    library_schema: LibrarySchema
+
+
+@dataclass
+@PayloadRegistry.register
+class ScanSandboxDirectoryResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
+    """Sandbox directory scan failed.
+
+    Common causes: directory doesn't exist, no Python files found, internal error.
+    """
+
+
 @dataclass
 @PayloadRegistry.register
 class RegisterLibraryFromFileRequest(RequestPayload):
-    """Register a library from a JSON file.
+    """Register a library by name or path, progressing through all lifecycle phases.
+
+    This request handles the complete library loading lifecycle:
+    DISCOVERED → METADATA_LOADED → EVALUATED → DEPENDENCIES_INSTALLED → LOADED
+
+    The handler automatically creates LibraryInfo if not already tracked, making it suitable
+    for both internal use (from load_all_libraries_from_config) and external use (scripts, tests, API).
 
     Use when: Loading custom libraries, adding new node types,
     registering development libraries, extending node capabilities.
 
     Args:
-        file_path: Path to the library JSON file to register
-        load_as_default_library: Whether to load as the default library (default: False)
+        library_name: Name of library to load (must match library JSON 'name' field). Either library_name OR file_path required (not both).
+        file_path: Path to library JSON file. Either library_name OR file_path required (not both).
+        perform_discovery_if_not_found: If True and library not found, trigger discovery (default: False)
+        load_as_default_library: Whether to mark this library as the default (default: False)
 
     Results: RegisterLibraryFromFileResultSuccess (with library name) | RegisterLibraryFromFileResultFailure (load error)
    """
 
-    file_path: str
+    library_name: str | None = None
+    file_path: str | None = None
+    perform_discovery_if_not_found: bool = False
     load_as_default_library: bool = False
 
 
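The updated RegisterLibraryFromFileRequest documents a mutual-exclusion contract: exactly one of library_name or file_path should be supplied. The diff does not show where that rule is enforced, so the following is only a hedged sketch of how a caller might honor it; the build_register_request helper and the trimmed stand-in dataclass are illustrative, not part of the package.

from dataclasses import dataclass


@dataclass
class RegisterLibraryFromFileRequest:  # trimmed stand-in for the payload shown above
    library_name: str | None = None
    file_path: str | None = None
    perform_discovery_if_not_found: bool = False
    load_as_default_library: bool = False


def build_register_request(
    *, library_name: str | None = None, file_path: str | None = None
) -> RegisterLibraryFromFileRequest:
    # Enforce the documented "either library_name OR file_path, not both" contract.
    if (library_name is None) == (file_path is None):
        raise ValueError("Provide exactly one of library_name or file_path")
    return RegisterLibraryFromFileRequest(library_name=library_name, file_path=file_path)


request = build_register_request(library_name="Griptape Nodes Library")
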
@@ -517,6 +583,79 @@ class ReloadAllLibrariesResultFailure(ResultPayloadFailure):
     """Library reload failed. Common causes: library loading errors, system constraints, initialization failures."""
 
 
+@dataclass
+@PayloadRegistry.register
+class DiscoverLibrariesRequest(RequestPayload):
+    """Discover all libraries from configuration.
+
+    Scans configured library paths and creates LibraryInfo entries in 'discovered' state.
+    This does not load any library contents - just identifies what's available.
+
+    Use when: Refreshing library catalog, checking for new libraries, initializing
+    library tracking before selective loading.
+
+    Results: DiscoverLibrariesResultSuccess | DiscoverLibrariesResultFailure
+    """
+
+    include_sandbox: bool = True  # Whether to include sandbox library in discovery
+
+
+@dataclass
+@PayloadRegistry.register
+class DiscoverLibrariesResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
+    """Libraries discovered successfully."""
+
+    libraries_discovered: set[DiscoveredLibrary]  # Discovered libraries with type info
+
+
+@dataclass
+@PayloadRegistry.register
+class DiscoverLibrariesResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
+    """Library discovery failed."""
+
+
+@dataclass
+@PayloadRegistry.register
+class EvaluateLibraryFitnessRequest(RequestPayload):
+    """Evaluate a library's fitness (compatibility with current engine).
+
+    Checks version compatibility and determines if the library can be loaded.
+    Does not actually load Python modules - just validates compatibility.
+
+    Args:
+        schema: The loaded LibrarySchema from metadata loading
+
+    Results: EvaluateLibraryFitnessResultSuccess | EvaluateLibraryFitnessResultFailure
+    """
+
+    schema: LibrarySchema
+
+
+@dataclass
+@PayloadRegistry.register
+class EvaluateLibraryFitnessResultSuccess(WorkflowNotAlteredMixin, ResultPayloadSuccess):
+    """Library fitness evaluation successful.
+
+    Returns fitness and any non-fatal problems (warnings).
+    Caller manages their own lifecycle state.
+    """
+
+    fitness: LibraryManager.LibraryFitness
+    problems: list[LibraryProblem]
+
+
+@dataclass
+@PayloadRegistry.register
+class EvaluateLibraryFitnessResultFailure(WorkflowNotAlteredMixin, ResultPayloadFailure):
+    """Library fitness evaluation failed - library is not fit for this engine.
+
+    Returns fitness and problems for caller to update their LibraryInfo.
+    """
+
+    fitness: LibraryManager.LibraryFitness
+    problems: list[LibraryProblem]
+
+
 @dataclass
 @PayloadRegistry.register
 class LoadLibrariesRequest(RequestPayload):
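
Both the success and failure payloads for fitness evaluation carry a fitness value plus a problems list, so a caller can record the outcome the same way in either case and branch only on whether to continue toward dependency install and load. A hedged, self-contained sketch of that caller pattern using stand-in types (the real payloads and LibraryManager.LibraryFitness enum live in the package, not here):

from dataclasses import dataclass, field
from enum import Enum


class LibraryFitness(Enum):  # stand-in for LibraryManager.LibraryFitness
    GOOD = "good"
    UNUSABLE = "unusable"


@dataclass
class FitnessResult:  # stand-in for the success/failure payloads above
    succeeded: bool
    fitness: LibraryFitness
    problems: list[str] = field(default_factory=list)


def handle_fitness_result(result: FitnessResult) -> bool:
    # Record fitness and problems identically for success and failure...
    print(f"fitness={result.fitness.value}, problems={result.problems}")
    # ...and only branch on whether to proceed with loading.
    return result.succeeded


handle_fitness_result(FitnessResult(True, LibraryFitness.GOOD, ["engine newer than tested range"]))
handle_fitness_result(FitnessResult(False, LibraryFitness.UNUSABLE, ["requires GPU compute"]))
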
griptape_nodes/retained_mode/events/os_events.py

@@ -62,10 +62,10 @@ class FileSystemEntry:
     name: str
     path: str  # Workspace-relative path (for portability)
     is_dir: bool
-    size: int
-    modified_time: float
-    absolute_path: str  # Absolute resolved path
-    mime_type: str | None = None  # None for directories, mimetype for files
+    size: int = 0  # File size in bytes (0 if not included)
+    modified_time: float = 0.0  # Modification timestamp (0.0 if not included)
+    absolute_path: str = ""  # Absolute resolved path (empty if not included)
+    mime_type: str | None = None  # None for directories, mimetype for files (None if not included)
 
 
 @dataclass
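
The metadata fields on FileSystemEntry now default to cheap sentinel values (0, 0.0, "", None) instead of being required, so entries can be constructed without stat, resolve, or MIME lookups when the caller opts out of them via the ListDirectoryRequest flags shown below. A minimal sketch of the same sentinel-default idea, independent of the package:

from dataclasses import dataclass


@dataclass
class FileSystemEntry:  # trimmed stand-in mirroring the fields above
    name: str
    path: str
    is_dir: bool
    size: int = 0            # 0 signals "not included" per the docstring above
    modified_time: float = 0.0
    absolute_path: str = ""
    mime_type: str | None = None


# Fast path: construct with only the fields that are always known.
entry = FileSystemEntry(name="notes.txt", path="docs/notes.txt", is_dir=False)
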
@@ -122,6 +122,10 @@ class ListDirectoryRequest(RequestPayload):
             If None, workspace constraints don't apply (e.g., cloud environments).
         pattern: Optional glob pattern to filter entries (e.g., "*.txt", "file_*.json").
             Only matches against file/directory names, not full paths.
+        include_size: If True, include file size in results (default: True). Set to False for faster listing.
+        include_modified_time: If True, include modified time in results (default: True). Set to False for faster listing.
+        include_mime_type: If True, include MIME type in results (default: True). Set to False for faster listing.
+        include_absolute_path: If True, include absolute resolved path in results (default: True). Set to False for faster listing.
 
     Results: ListDirectoryResultSuccess (with entries) | ListDirectoryResultFailure (access denied, not found)
     """
@@ -130,6 +134,10 @@ class ListDirectoryRequest(RequestPayload):
     show_hidden: bool = False
     workspace_only: bool | None = True
     pattern: str | None = None
+    include_size: bool = True
+    include_modified_time: bool = True
+    include_mime_type: bool = True
+    include_absolute_path: bool = True
 
 
 @dataclass
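
The new include_* flags exist because size, modification time, MIME type, and path resolution each cost extra work per entry (a stat call, a mimetypes lookup, a resolve). The handler itself is not shown in this diff, so the following is only a rough, self-contained illustration of the trade-off using the standard library; it is not the package's implementation.

import mimetypes
import os


def list_directory(path: str, *, include_size: bool = True, include_mime_type: bool = True) -> list[dict]:
    entries = []
    with os.scandir(path) as it:
        for dir_entry in it:
            entry = {"name": dir_entry.name, "is_dir": dir_entry.is_dir()}
            if include_size and not entry["is_dir"]:
                entry["size"] = dir_entry.stat().st_size  # extra stat per file
            if include_mime_type and not entry["is_dir"]:
                entry["mime_type"] = mimetypes.guess_type(dir_entry.name)[0]  # extra lookup per file
            entries.append(entry)
    return entries


# Fast listing: names only, no per-file stat or MIME work.
fast = list_directory(".", include_size=False, include_mime_type=False)
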
griptape_nodes/retained_mode/managers/fitness_problems/libraries/__init__.py

@@ -9,6 +9,7 @@ from .deprecated_node_warning_problem import DeprecatedNodeWarningProblem
 from .duplicate_library_problem import DuplicateLibraryProblem
 from .duplicate_node_registration_problem import DuplicateNodeRegistrationProblem
 from .engine_version_error_problem import EngineVersionErrorProblem
+from .incompatible_requirements_problem import IncompatibleRequirementsProblem
 from .insufficient_disk_space_problem import InsufficientDiskSpaceProblem
 from .invalid_version_string_problem import InvalidVersionStringProblem
 from .library_json_decode_problem import LibraryJsonDecodeProblem
@@ -39,6 +40,7 @@ __all__ = [
     "DuplicateLibraryProblem",
     "DuplicateNodeRegistrationProblem",
     "EngineVersionErrorProblem",
+    "IncompatibleRequirementsProblem",
     "InsufficientDiskSpaceProblem",
     "InvalidVersionStringProblem",
     "LibraryJsonDecodeProblem",
griptape_nodes/retained_mode/managers/fitness_problems/libraries/incompatible_requirements_problem.py (new file)

@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+import logging
+from dataclasses import dataclass
+from typing import Any
+
+from griptape_nodes.retained_mode.managers.fitness_problems.libraries.library_problem import LibraryProblem
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class IncompatibleRequirementsProblem(LibraryProblem):
+    """Problem indicating library requirements are not met by the current system."""
+
+    requirements: dict[str, Any]
+    system_capabilities: dict[str, Any]
+
+    @classmethod
+    def collate_problems_for_display(cls, instances: list[IncompatibleRequirementsProblem]) -> str:
+        """Display incompatible requirements problem.
+
+        There should only be one instance per library since each LibraryInfo
+        is already associated with a specific library path.
+        """
+        if len(instances) > 1:
+            logger.error(
+                "IncompatibleRequirementsProblem: Expected 1 instance but got %s. Each LibraryInfo should only have one IncompatibleRequirementsProblem.",
+                len(instances),
+            )
+
+        # Use the first instance's details
+        problem = instances[0]
+        return f"Library requirements not met. Required: {problem.requirements}, System: {problem.system_capabilities}"
griptape_nodes/retained_mode/managers/flow_manager.py

@@ -2206,7 +2206,19 @@ class FlowManager:
             return
 
         # For each parameter, get its value from the SubflowNodeGroup and create a set value command
+        class_name_prefix = start_node_type.lower()
         for prefixed_param_name in parameter_names:
+            # Skip parameters that don't have the expected prefix for this StartFlow node.
+            # These are group-level settings that control the group's behavior
+            # but shouldn't be passed to the StartFlow node of each iteration.
+            if not prefixed_param_name.startswith(f"{class_name_prefix}_"):
+                logger.debug(
+                    "Skipping group-level parameter '%s' - not a StartFlow parameter (expected prefix '%s_')",
+                    prefixed_param_name,
+                    class_name_prefix,
+                )
+                continue
+
             # Get the value from the SubflowNodeGroup parameter
             param_value = node_group_node.get_parameter_value(param_name=prefixed_param_name)
 
@@ -2215,7 +2227,6 @@
                 continue
 
             # Strip the prefix to get the original parameter name for the StartFlow node
-            class_name_prefix = start_node_type.lower()
             original_param_name = prefixed_param_name.removeprefix(f"{class_name_prefix}_")
             # Create unique parameter UUID for this value
 
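The hoisted class_name_prefix and the new startswith guard mean only parameters named like "<startflowtype>_<param>" are forwarded; anything without that prefix is treated as a group-level setting and skipped. A small stand-alone demonstration of the string handling involved (the parameter names here are made up for illustration):

start_node_type = "StartFlow"
class_name_prefix = start_node_type.lower()  # "startflow"

parameter_names = ["startflow_prompt", "startflow_seed", "run_in_order", "max_iterations"]

forwarded = {}
for prefixed_param_name in parameter_names:
    if not prefixed_param_name.startswith(f"{class_name_prefix}_"):
        # Group-level setting: stays on the node group, not forwarded to StartFlow.
        continue
    original_param_name = prefixed_param_name.removeprefix(f"{class_name_prefix}_")
    forwarded[original_param_name] = prefixed_param_name

print(forwarded)  # {'prompt': 'startflow_prompt', 'seed': 'startflow_seed'}
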
@@ -2542,11 +2553,22 @@
             details = f"Failed to kick off flow with name {flow_name}. Exception occurred: {e} "
             return StartFlowResultFailure(validation_exceptions=[e], result_details=details)
 
+        if self._global_control_flow_machine:
+            resolution_machine = self._global_control_flow_machine.resolution_machine
+            if resolution_machine.is_errored():
+                error_message = resolution_machine.get_error_message()
+                result_details = f"Failed to kick off flow with name {flow_name}. Exception occurred: {error_message} "
+                exception = RuntimeError(error_message)
+                # Pass through the error message without adding extra wrapping
+                return StartFlowResultFailure(
+                    validation_exceptions=[exception] if error_message else [], result_details=result_details
+                )
+
         details = f"Successfully kicked off flow with name {flow_name}"
 
         return StartFlowResultSuccess(result_details=details)
 
-    async def on_start_flow_from_node_request(self, request: StartFlowFromNodeRequest) -> ResultPayload:  # noqa: C901, PLR0911
+    async def on_start_flow_from_node_request(self, request: StartFlowFromNodeRequest) -> ResultPayload:  # noqa: C901, PLR0911, PLR0912
         # which flow
         flow_name = request.flow_name
         if not flow_name:
@@ -2601,6 +2623,15 @@
             details = f"Failed to kick off flow with name {flow_name}. Exception occurred: {e} "
             return StartFlowFromNodeResultFailure(validation_exceptions=[e], result_details=details)
 
+        if self._global_control_flow_machine:
+            resolution_machine = self._global_control_flow_machine.resolution_machine
+            if resolution_machine.is_errored():
+                error_message = resolution_machine.get_error_message()
+                # Pass through the error message without adding extra wrapping
+                return StartFlowFromNodeResultFailure(
+                    validation_exceptions=[], result_details=error_message or "Flow execution failed"
+                )
+
         details = f"Successfully kicked off flow with name {flow_name}"
 
         return StartFlowFromNodeResultSuccess(result_details=details)
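
Both start-flow handlers now poll the resolution machine after kickoff and surface its error message verbatim instead of reporting success. A hedged, self-contained sketch of that post-check pattern with stand-in types (is_errored and get_error_message mirror the names in the diff; everything else is illustrative):

from dataclasses import dataclass


class ResolutionMachine:  # stand-in exposing the two accessors used in the diff
    def __init__(self, error: str | None = None) -> None:
        self._error = error

    def is_errored(self) -> bool:
        return self._error is not None

    def get_error_message(self) -> str | None:
        return self._error


@dataclass
class FlowResult:
    ok: bool
    details: str


def finish_start_flow(flow_name: str, machine: ResolutionMachine) -> FlowResult:
    # Check for errors recorded during resolution and pass the message through unwrapped.
    if machine.is_errored():
        return FlowResult(ok=False, details=machine.get_error_message() or "Flow execution failed")
    return FlowResult(ok=True, details=f"Successfully kicked off flow with name {flow_name}")


print(finish_start_flow("my_flow", ResolutionMachine()))
print(finish_start_flow("my_flow", ResolutionMachine(error="Node 'Agent' raised ValueError")))
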
@@ -2710,7 +2741,24 @@
 
         return start_nodes + control_nodes + valid_data_nodes
 
-    async def on_start_local_subflow_request(self, request: StartLocalSubflowRequest) -> ResultPayload:
+    def _validate_and_get_start_node(
+        self, flow_name: str, start_node_name: str | None, flow: ControlFlow
+    ) -> BaseNode | StartLocalSubflowResultFailure:
+        """Validate and get the start node for subflow execution."""
+        if start_node_name is None:
+            start_nodes = self.get_start_nodes_in_flow(flow)
+            if not start_nodes:
+                details = f"Cannot start subflow '{flow_name}'. No start nodes found in flow."
+                return StartLocalSubflowResultFailure(result_details=details)
+            return start_nodes[0]
+
+        try:
+            return GriptapeNodes.NodeManager().get_node_by_name(start_node_name)
+        except ValueError as err:
+            details = f"Cannot start subflow '{flow_name}'. Start node '{start_node_name}' not found: {err}"
+            return StartLocalSubflowResultFailure(result_details=details)
+
+    async def on_start_local_subflow_request(self, request: StartLocalSubflowRequest) -> ResultPayload:  # noqa: C901, PLR0911
         flow_name = request.flow_name
         if not flow_name:
             details = "Must provide flow name to start a flow."
@@ -2726,19 +2774,32 @@
             msg = "There must be a flow going to start a Subflow"
             return StartLocalSubflowResultFailure(result_details=msg)
 
-        start_node_name = request.start_node
-        if start_node_name is None:
-            start_nodes = self.get_start_nodes_in_flow(flow)
-            if not start_nodes:
-                details = f"Cannot start subflow '{flow_name}'. No start nodes found in flow."
-                return StartLocalSubflowResultFailure(result_details=details)
-            start_node = start_nodes[0]
-        else:
-            try:
-                start_node = GriptapeNodes.NodeManager().get_node_by_name(start_node_name)
-            except ValueError as err:
-                details = f"Cannot start subflow '{flow_name}'. Start node '{start_node_name}' not found: {err}"
-                return StartLocalSubflowResultFailure(result_details=details)
+        start_node = self._validate_and_get_start_node(flow_name, request.start_node, flow)
+        if isinstance(start_node, StartLocalSubflowResultFailure):
+            return start_node
+
+        # Run validation before starting the subflow
+        validation_result = await self.on_validate_flow_dependencies_request(
+            ValidateFlowDependenciesRequest(flow_name=flow_name, flow_node_name=start_node.name if start_node else None)
+        )
+        if validation_result.failed():
+            # Extract error details from the failed validation result
+            details = (
+                validation_result.result_details
+                if hasattr(validation_result, "result_details")
+                else f"Subflow '{flow_name}' validation failed"
+            )
+            return StartLocalSubflowResultFailure(result_details=details)
+
+        validation_result = cast("ValidateFlowDependenciesResultSuccess", validation_result)
+        if not validation_result.validation_succeeded:
+            # Build detailed error message with all validation exceptions
+            details_lines = []
+            if validation_result.exceptions:
+                for exception in validation_result.exceptions:
+                    details_lines.append(str(exception))  # noqa: PERF401 keeping in for loop for clarity.
+            details = "\n".join(details_lines) if details_lines else f"Subflow '{flow_name}' validation failed"
+            return StartLocalSubflowResultFailure(result_details=details)
 
         subflow_machine = ControlFlowMachine(
             flow.name,
@@ -2746,7 +2807,16 @@
             is_isolated=True,
         )
 
-        await subflow_machine.start_flow(start_node)
+        try:
+            await subflow_machine.start_flow(start_node)
+        except Exception as err:
+            msg = f"Failed to run flow {flow_name}. Error: {err}"
+            return StartLocalSubflowResultFailure(result_details=msg)
+
+        if subflow_machine.resolution_machine.is_errored():
+            error_message = subflow_machine.resolution_machine.get_error_message()
+            # Pass through the error message directly without wrapping
+            return StartLocalSubflowResultFailure(result_details=error_message or "Subflow errored during execution")
 
         return StartLocalSubflowResultSuccess(result_details=f"Successfully executed local subflow '{flow_name}'")
 
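The new _validate_and_get_start_node helper returns either the node to start from or a ready-made failure payload, so the subflow handler can short-circuit with a single isinstance check instead of nesting try/except and branching inline. A minimal, package-independent sketch of that union-return pattern:

from dataclasses import dataclass


@dataclass
class Node:
    name: str


@dataclass
class Failure:
    result_details: str


def resolve_start_node(name: str | None, nodes: dict[str, Node]) -> Node | Failure:
    # Return either a usable node or a failure the caller can return as-is.
    if name is None:
        return next(iter(nodes.values()), None) or Failure("No start nodes found in flow.")
    return nodes.get(name) or Failure(f"Start node '{name}' not found")


def start_subflow(name: str | None, nodes: dict[str, Node]) -> str:
    start_node = resolve_start_node(name, nodes)
    if isinstance(start_node, Failure):
        return start_node.result_details  # short-circuit, no nesting
    return f"starting from {start_node.name}"


print(start_subflow(None, {"StartFlow_1": Node("StartFlow_1")}))
print(start_subflow("Missing", {"StartFlow_1": Node("StartFlow_1")}))
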
@@ -3686,6 +3756,20 @@
             if self.check_for_existing_running_flow():
                 await self.cancel_flow_run()
             raise RuntimeError(e) from e
+
+        if resolution_machine.is_errored():
+            error_message = resolution_machine.get_error_message()
+            logger.error("Node '%s' failed: %s", node.name, error_message)
+            self._global_single_node_resolution = False
+            self._global_control_flow_machine.context.current_nodes = []
+            GriptapeNodes.EventManager().put_event(
+                ExecutionGriptapeNodeEvent(
+                    wrapped_event=ExecutionEvent(payload=InvolvedNodesEvent(involved_nodes=[]))
+                )
+            )
+            # Re-raise with the original error message
+            raise RuntimeError(error_message or "Node resolution failed")
+
         if resolution_machine.is_complete():
             self._global_single_node_resolution = False
             self._global_control_flow_machine.context.current_nodes = []
@@ -4014,9 +4098,38 @@
             queue.put(next_node)
         return list(processed.keys())
 
-    def is_node_connected(self, start_node: BaseNode, node: BaseNode) -> bool:
-        nodes = self.get_all_connected_nodes(start_node)
-        return node in nodes
+    def is_node_connected(self, start_node: BaseNode, node: BaseNode) -> list[str]:
+        """Check if node is in the forward control path from start_node, returning boundary nodes if connected.
+
+        Returns:
+            list[str]: Names of nodes that have direct connections to 'node' and are in the forward control path,
+                or an empty list if not in the forward path.
+        """
+        connections = self.get_connections()
+
+        # Check if node is in the forward control path from start_node
+        if not connections.is_node_in_forward_control_path(start_node, node):
+            return []
+
+        # Node is in forward path - find boundary nodes that connect to it
+        boundary_nodes = []
+
+        # Check incoming connections to the target node
+        if node.name in connections.incoming_index:
+            incoming_params = connections.incoming_index[node.name]
+            for connection_ids in incoming_params.values():
+                for connection_id in connection_ids:
+                    connection = connections.connections[connection_id]
+                    source_node_name = connection.source_node.name
+
+                    # Only include if source node is also in the forward control path from start_node
+                    if (
+                        connections.is_node_in_forward_control_path(start_node, connection.source_node)
+                        and source_node_name not in boundary_nodes
+                    ):
+                        boundary_nodes.append(source_node_name)
+
+        return boundary_nodes
 
     def get_node_dependencies(self, flow: ControlFlow, node: BaseNode) -> list[BaseNode]:
         """Get all upstream nodes that the given node depends on.