griptape-nodes 0.58.1__py3-none-any.whl → 0.59.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. griptape_nodes/bootstrap/utils/python_subprocess_executor.py +2 -2
  2. griptape_nodes/bootstrap/workflow_executors/local_session_workflow_executor.py +0 -5
  3. griptape_nodes/bootstrap/workflow_executors/local_workflow_executor.py +9 -5
  4. griptape_nodes/bootstrap/workflow_executors/subprocess_workflow_executor.py +0 -1
  5. griptape_nodes/bootstrap/workflow_executors/workflow_executor.py +1 -3
  6. griptape_nodes/bootstrap/workflow_publishers/local_workflow_publisher.py +1 -1
  7. griptape_nodes/cli/commands/init.py +53 -7
  8. griptape_nodes/cli/shared.py +1 -0
  9. griptape_nodes/common/node_executor.py +216 -40
  10. griptape_nodes/exe_types/core_types.py +46 -0
  11. griptape_nodes/exe_types/node_types.py +272 -0
  12. griptape_nodes/machines/control_flow.py +222 -16
  13. griptape_nodes/machines/dag_builder.py +212 -1
  14. griptape_nodes/machines/parallel_resolution.py +237 -4
  15. griptape_nodes/node_library/workflow_registry.py +1 -1
  16. griptape_nodes/retained_mode/events/execution_events.py +5 -4
  17. griptape_nodes/retained_mode/events/flow_events.py +17 -67
  18. griptape_nodes/retained_mode/events/parameter_events.py +122 -1
  19. griptape_nodes/retained_mode/managers/event_manager.py +17 -13
  20. griptape_nodes/retained_mode/managers/flow_manager.py +316 -573
  21. griptape_nodes/retained_mode/managers/library_manager.py +32 -20
  22. griptape_nodes/retained_mode/managers/model_manager.py +19 -8
  23. griptape_nodes/retained_mode/managers/node_manager.py +463 -3
  24. griptape_nodes/retained_mode/managers/object_manager.py +2 -2
  25. griptape_nodes/retained_mode/managers/workflow_manager.py +37 -46
  26. griptape_nodes/retained_mode/retained_mode.py +297 -3
  27. {griptape_nodes-0.58.1.dist-info → griptape_nodes-0.59.0.dist-info}/METADATA +3 -2
  28. {griptape_nodes-0.58.1.dist-info → griptape_nodes-0.59.0.dist-info}/RECORD +30 -30
  29. {griptape_nodes-0.58.1.dist-info → griptape_nodes-0.59.0.dist-info}/WHEEL +1 -1
  30. {griptape_nodes-0.58.1.dist-info → griptape_nodes-0.59.0.dist-info}/entry_points.txt +0 -0
griptape_nodes/exe_types/core_types.py
@@ -3,6 +3,7 @@ from __future__ import annotations
 import logging
 import uuid
 from abc import ABC, abstractmethod
+from collections.abc import Callable
 from copy import deepcopy
 from dataclasses import dataclass, field
 from enum import Enum, StrEnum, auto
@@ -754,6 +755,51 @@ class ParameterMessage(BaseNodeElement, UIOptionsMixin):
         return event_data


+class DeprecationMessage(ParameterMessage):
+    """A specialized ParameterMessage for deprecation warnings with default warning styling."""
+
+    # Keep the same element_type as ParameterMessage so the UI recognizes it
+    element_type: str = "ParameterMessage"
+
+    def __init__(
+        self,
+        value: str,
+        button_text: str,
+        migrate_function: Callable[[Any, Any], Any],
+        **kwargs,
+    ):
+        """Initialize a deprecation message with default warning styling.
+
+        Args:
+            value: The deprecation message text
+            button_text: Text for the migration button
+            migrate_function: Function to call when the migration button is clicked
+            **kwargs: Additional arguments passed to ParameterMessage
+        """
+        # Set defaults for deprecation messages
+        kwargs.setdefault("variant", "warning")
+        kwargs.setdefault("full_width", True)
+
+        # Add the button trait
+        from griptape_nodes.traits.button import Button
+
+        kwargs.setdefault("traits", {})
+        kwargs["traits"][Button(label=button_text, icon="plus", variant="secondary", on_click=migrate_function)] = None
+
+        super().__init__(value=value, **kwargs)
+
+    def to_dict(self) -> dict:
+        """Override to_dict to use element_type instead of the class name.
+
+        The base to_dict() method uses self.__class__.__name__, which would return
+        "DeprecationMessage", but the UI expects element_type to be "ParameterMessage"
+        to recognize it as a valid ParameterMessage element.
+        """
+        data = super().to_dict()
+        data["element_type"] = self.element_type  # Use "ParameterMessage", not "DeprecationMessage"
+        return data
+
+
 class ParameterGroup(BaseNodeElement, UIOptionsMixin):
     """UI element for a group of parameters."""

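For orientation, here is a minimal usage sketch; it is not part of the diff. It relies only on the constructor and to_dict() shown above, assumes the class lives in exe_types/core_types.py as the file list suggests, and assumes ParameterMessage's remaining constructor arguments have defaults. The migrate_old_parameter callback and parameter names are hypothetical.

    # Minimal sketch (hypothetical callback and parameter names).
    from griptape_nodes.exe_types.core_types import DeprecationMessage

    def migrate_old_parameter(node, value):
        """Hypothetical migration callback: copy the deprecated value onto its replacement."""
        node.set_parameter_value("new_param", value)

    deprecation = DeprecationMessage(
        value="'old_param' is deprecated; use 'new_param' instead.",
        button_text="Migrate",
        migrate_function=migrate_old_parameter,
    )

    # The to_dict() override keeps the UI-facing element_type stable.
    assert deprecation.to_dict()["element_type"] == "ParameterMessage"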
griptape_nodes/exe_types/node_types.py
@@ -40,6 +40,7 @@ from griptape_nodes.traits.options import Options
 from griptape_nodes.utils import async_utils

 if TYPE_CHECKING:
+    from griptape_nodes.exe_types.connections import Connections
     from griptape_nodes.exe_types.core_types import NodeMessagePayload
     from griptape_nodes.node_library.library_registry import LibraryNameAndVersion

@@ -181,6 +182,15 @@ class BaseNode(ABC):
             ui_options={"hide": True},
         )
         self.add_parameter(self.execution_environment)
+        self.node_group = Parameter(
+            name="job_group",
+            tooltip="Groupings of multiple nodes to send up as a Deadline Cloud job.",
+            type=ParameterTypeBuiltin.STR,
+            allowed_modes={ParameterMode.PROPERTY},
+            default_value="",
+            ui_options={"hide": True},
+        )
+        self.add_parameter(self.node_group)

     @property
     def state(self) -> NodeResolutionState:
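A short sketch, not part of the diff, of how the new hidden parameter might be used: nodes sharing the same non-empty job_group value are what the NodeGroup machinery below collects into a single group. The helper name tag_as_group is hypothetical; set_parameter_value is the existing BaseNode API used elsewhere in this diff.

    from griptape_nodes.exe_types.node_types import BaseNode

    def tag_as_group(nodes: list[BaseNode], group_id: str) -> None:
        """Hypothetical helper: tag nodes so DAG resolution collapses them into one NodeGroupProxyNode."""
        for node in nodes:
            node.set_parameter_value("job_group", group_id)

    # e.g. tag_as_group([generate_a, generate_b, upscale], "render_pass_1")
    # Nodes that share "render_pass_1" can then be submitted together,
    # for example as a single Deadline Cloud job.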
@@ -1829,6 +1839,268 @@ class ErrorProxyNode(BaseNode):
         return None


+@dataclass
+class NodeGroup:
+    """Represents a group of nodes that should be executed together in parallel.
+
+    Nodes in a group are identified by having the same non-empty value in their
+    node_group parameter. During DAG resolution, grouped nodes are replaced with
+    a single NodeGroupProxyNode that represents them in the execution graph.
+
+    Attributes:
+        group_id: Unique identifier for this group (the value of the node_group parameter)
+        nodes: Mapping of node name to the BaseNode instances that belong to this group
+        internal_connections: Connections between nodes within the group
+        external_incoming_connections: Connections from outside nodes into the group
+        external_outgoing_connections: Connections from group nodes to outside nodes
+    """
+
+    group_id: str
+    nodes: dict[str, BaseNode] = field(default_factory=dict)
+    internal_connections: list[Connection] = field(default_factory=list)
+    external_incoming_connections: list[Connection] = field(default_factory=list)
+    external_outgoing_connections: list[Connection] = field(default_factory=list)
+    # Store original node references before remapping to the proxy (for cleanup)
+    original_incoming_targets: dict[int, BaseNode] = field(default_factory=dict)  # conn_id -> original target
+    original_outgoing_sources: dict[int, BaseNode] = field(default_factory=dict)  # conn_id -> original source
+
+    def add_node(self, node: BaseNode) -> None:
+        """Add a node to this group."""
+        self.nodes[node.name] = node
+
+    def validate_no_intermediate_nodes(self, all_connections: dict[int, Connection]) -> None:
+        """Validate that no ungrouped nodes exist between grouped nodes.
+
+        This method checks the dependency graph to ensure that all nodes that lie
+        on paths between grouped nodes are also part of the group. If ungrouped
+        nodes are found between grouped nodes, this indicates a logical error in
+        the group definition.
+
+        Args:
+            all_connections: Dictionary mapping connection IDs to Connection objects
+
+        Raises:
+            ValueError: If ungrouped nodes are found between grouped nodes
+        """
+        from griptape_nodes.exe_types.connections import Connections
+
+        # Build a Connections object for traversal
+        connections = Connections()
+        connections.connections = all_connections
+
+        # Rebuild indices for efficient lookup
+        for conn_id, conn in all_connections.items():
+            connections.outgoing_index.setdefault(conn.source_node.name, {}).setdefault(
+                conn.source_parameter.name, []
+            ).append(conn_id)
+            connections.incoming_index.setdefault(conn.target_node.name, {}).setdefault(
+                conn.target_parameter.name, []
+            ).append(conn_id)
+
+        # Check each pair of nodes in the group
+        for node_a in self.nodes.values():
+            for node_b in self.nodes.values():
+                if node_a == node_b:
+                    continue
+
+                # Check if there's a path from node_a to node_b
+                intermediate_nodes = self._find_intermediate_nodes(node_a, node_b, connections)
+
+                # Check if any intermediate nodes are not in the group
+                ungrouped_intermediates = [n for n in intermediate_nodes if n.name not in self.nodes]
+
+                if ungrouped_intermediates:
+                    ungrouped_names = [n.name for n in ungrouped_intermediates]
+                    msg = (
+                        f"Invalid node group '{self.group_id}': Found ungrouped nodes between grouped nodes. "
+                        f"Ungrouped nodes {ungrouped_names} exist on the path from '{node_a.name}' to '{node_b.name}'. "
+                        f"All nodes on paths between grouped nodes must be part of the same group."
+                    )
+                    raise ValueError(msg)
+
+    def _find_intermediate_nodes(  # noqa: C901
+        self, start_node: BaseNode, end_node: BaseNode, connections: Connections
+    ) -> set[BaseNode]:
+        """Find all nodes on paths between start_node and end_node (excluding endpoints).
+
+        Uses BFS to explore all paths from start_node to end_node and collects
+        all nodes encountered (except the start and end nodes themselves).
+
+        Args:
+            start_node: Starting node for the path search
+            end_node: Target node for the path search
+            connections: Connections object for graph traversal
+
+        Returns:
+            Set of nodes found on paths between start and end (excluding endpoints)
+        """
+        if start_node.name not in connections.outgoing_index:
+            return set()
+
+        visited = set()
+        intermediate = set()
+        queue = [(start_node, [start_node])]
+
+        while queue:
+            current_node, path = queue.pop(0)
+
+            if current_node.name in visited:
+                continue
+            visited.add(current_node.name)
+
+            # Process outgoing connections from the current node
+            if current_node.name not in connections.outgoing_index:
+                continue
+
+            for conn_ids in connections.outgoing_index[current_node.name].values():
+                for conn_id in conn_ids:
+                    if conn_id not in connections.connections:
+                        continue
+
+                    conn = connections.connections[conn_id]
+                    next_node = conn.target_node
+
+                    # If we reached the end node, record the intermediate nodes on this path
+                    if next_node == end_node:
+                        for node in path[1:]:
+                            intermediate.add(node)
+                        continue
+
+                    # Continue exploring if not already visited
+                    if next_node.name not in visited:
+                        queue.append((next_node, [*path, next_node]))
+
+        return intermediate
+
+
+class NodeGroupProxyNode(BaseNode):
+    """Proxy node that represents a group of nodes during DAG execution.
+
+    This node acts as a single execution unit for a group of nodes that should
+    be executed in parallel. When the DAG executor encounters this proxy node,
+    it passes the entire NodeGroup to the NodeExecutor, which handles parallel
+    execution of all grouped nodes.
+
+    The proxy node has parameters that mirror the external connections to/from
+    the group, allowing it to integrate seamlessly into the DAG structure.
+
+    Attributes:
+        node_group_data: The NodeGroup instance this proxy represents
+    """
+
+    def __init__(
+        self,
+        name: str,
+        node_group: NodeGroup,
+        metadata: dict[Any, Any] | None = None,
+    ) -> None:
+        super().__init__(name, metadata)
+        self.node_group_data = node_group
+
+        # Track mapping from proxy parameter name to (original_node, original_param_name)
+        self._proxy_param_to_node_param: dict[str, tuple[BaseNode, str]] = {}
+        execution_type = set()
+        for node in node_group.nodes.values():
+            execution_type.add(node.get_parameter_value(node.execution_environment.name))
+        # TODO: Set this by group in the UI rather than on each node. https://github.com/griptape-ai/griptape-vsl-gui/issues/1429
+        if len(execution_type) > 1:
+            # Hoping this check can be removed by UI updates. For now, the execution environment is set
+            # individually on each node in the group; all of them must match, or we fail.
+            msg = f"Node group '{node_group.group_id}' has nodes with multiple execution types: {execution_type}"
+            raise ValueError(msg)
+        self.set_parameter_value(self.execution_environment.name, execution_type.pop())
+        # Note: Proxy parameters are created AFTER connection remapping in control_flow.py
+        # via an explicit call to create_proxy_parameters()
+
+    def create_proxy_parameters(self) -> None:
+        """Create parameters on the proxy that match external connections.
+
+        For each external incoming connection, create an input parameter with the
+        name format {sanitized_node_name}__{param_name}. This allows the proxy to
+        forward parameter values to the correct original node.
+
+        For each external outgoing connection, create an output parameter.
+        This allows the proxy to integrate seamlessly into the DAG.
+        """
+        # Track created parameters to avoid duplicates
+        created_params = set()
+
+        # Create input parameters for external incoming connections
+        for conn in self.node_group_data.external_incoming_connections:
+            conn_id = id(conn)
+            # Get the original target node from the saved mapping (before it was remapped to the proxy)
+            target_node = self.node_group_data.original_incoming_targets.get(conn_id)
+            if target_node is None:
+                # Should not happen; fail loudly if the mapping is missing
+                msg = f"Failed to find target node for incoming connection with ID: {conn_id}"
+                raise ValueError(msg)
+            target_param = conn.target_parameter
+
+            # Create proxy parameter name: {sanitized_node_name}__{param_name}
+            sanitized_node_name = target_node.name.replace(" ", "_")
+            proxy_param_name = f"{sanitized_node_name}__{target_param.name}"
+
+            if proxy_param_name not in created_params:
+                proxy_param = Parameter(
+                    name=proxy_param_name,
+                    type=target_param.type,
+                    input_types=target_param.input_types,
+                    output_type=target_param.output_type,
+                    tooltip=f"Proxy input for {target_node.name}.{target_param.name}",
+                    allowed_modes={ParameterMode.INPUT},
+                )
+                self.add_parameter(proxy_param)
+                created_params.add(proxy_param_name)
+
+                # Track mapping from proxy param to original node/param
+                self._proxy_param_to_node_param[proxy_param_name] = (target_node, target_param.name)
+
+        # Create output parameters for external outgoing connections
+        for conn in self.node_group_data.external_outgoing_connections:
+            conn_id = id(conn)
+            # Get the original source node from the saved mapping (before it was remapped to the proxy)
+            source_node = self.node_group_data.original_outgoing_sources.get(conn_id)
+            if source_node is None:
+                # Skip if not found (should not happen)
+                continue
+
+            source_param = conn.source_parameter
+
+            # Create proxy parameter name: {sanitized_node_name}__{param_name}
+            sanitized_node_name = source_node.name.replace(" ", "_")
+            proxy_param_name = f"{sanitized_node_name}__{source_param.name}"
+
+            if proxy_param_name not in created_params:
+                proxy_param = Parameter(
+                    name=proxy_param_name,
+                    type=source_param.type,
+                    input_types=source_param.input_types,
+                    output_type=source_param.output_type,
+                    tooltip=f"Proxy output for {source_node.name}.{source_param.name}",
+                    allowed_modes={ParameterMode.OUTPUT},
+                )
+                self.add_parameter(proxy_param)
+                created_params.add(proxy_param_name)
+
+                # Track mapping from proxy param to original node/param
+                self._proxy_param_to_node_param[proxy_param_name] = (source_node, source_param.name)
+
+    async def aprocess(self) -> None:
+        """Async process method - not executed directly for proxy nodes.
+
+        The DAG executor does not run this proxy itself; it passes the NodeGroup to the
+        NodeExecutor, which executes all grouped nodes concurrently and propagates input
+        values from the proxy to the grouped nodes.
+        """
+        msg = "NodeGroupProxyNode should not be executed locally."
+        raise NotImplementedError(msg)
+
+    def process(self) -> Any:
+        """Synchronous process method - not used for proxy nodes."""
+        msg = "NodeGroupProxyNode should use aprocess() for async execution."
+        raise NotImplementedError(msg)
+
+
 class Connection:
     source_node: BaseNode
     target_node: BaseNode
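To tie the pieces together, here is a sketch, not part of the diff, of how NodeGroup and NodeGroupProxyNode appear to fit together. In 0.59.0 this wiring is performed by the execution machinery (the diff's comments point at control_flow.py and the DAG builder), so the helper below, its name build_group_proxy, and the example parameter name "prompt" are all hypothetical, assuming the grouped nodes share an execution environment and that external connections have already been remapped to the proxy.

    from griptape_nodes.exe_types.node_types import BaseNode, Connection, NodeGroup, NodeGroupProxyNode

    def build_group_proxy(
        nodes: list[BaseNode], all_connections: dict[int, Connection], group_id: str
    ) -> NodeGroupProxyNode:
        """Hypothetical helper mirroring what the execution machinery does for a job_group."""
        group = NodeGroup(group_id=group_id)
        for node in nodes:
            group.add_node(node)

        # Raises ValueError if an ungrouped node sits on a path between two grouped nodes.
        group.validate_no_intermediate_nodes(all_connections)

        # The proxy stands in for the whole group in the DAG. Once external connections are
        # remapped to it, create_proxy_parameters() mirrors them as parameters named
        # "{sanitized_node_name}__{param_name}", e.g. "Generate_A__prompt".
        proxy = NodeGroupProxyNode(name=f"{group_id}_proxy", node_group=group)
        proxy.create_proxy_parameters()
        return proxy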