vellum-ai 1.7.4__py3-none-any.whl → 1.7.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vellum/__init__.py +2 -0
- vellum/client/core/client_wrapper.py +2 -2
- vellum/client/reference.md +95 -0
- vellum/client/resources/workflow_deployments/client.py +111 -0
- vellum/client/resources/workflow_deployments/raw_client.py +121 -0
- vellum/client/types/__init__.py +2 -0
- vellum/client/types/paginated_workflow_deployment_release_list.py +30 -0
- vellum/client/types/vellum_error_code_enum.py +2 -0
- vellum/client/types/vellum_sdk_error_code_enum.py +2 -0
- vellum/client/types/workflow_execution_event_error_code.py +2 -0
- vellum/types/paginated_workflow_deployment_release_list.py +3 -0
- vellum/workflows/edges/__init__.py +2 -0
- vellum/workflows/edges/trigger_edge.py +67 -0
- vellum/workflows/events/tests/test_event.py +40 -0
- vellum/workflows/events/workflow.py +15 -3
- vellum/workflows/graph/graph.py +93 -0
- vellum/workflows/graph/tests/test_graph.py +167 -0
- vellum/workflows/nodes/bases/base.py +28 -9
- vellum/workflows/nodes/displayable/search_node/node.py +2 -1
- vellum/workflows/nodes/displayable/search_node/tests/test_node.py +14 -0
- vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +7 -1
- vellum/workflows/nodes/displayable/subworkflow_deployment_node/tests/test_node.py +1 -1
- vellum/workflows/nodes/displayable/tool_calling_node/node.py +1 -1
- vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py +54 -0
- vellum/workflows/nodes/displayable/tool_calling_node/utils.py +27 -25
- vellum/workflows/ports/port.py +11 -0
- vellum/workflows/runner/runner.py +30 -40
- vellum/workflows/triggers/__init__.py +5 -0
- vellum/workflows/triggers/base.py +125 -0
- vellum/workflows/triggers/integration.py +62 -0
- vellum/workflows/triggers/manual.py +37 -0
- vellum/workflows/triggers/tests/__init__.py +1 -0
- vellum/workflows/triggers/tests/test_integration.py +102 -0
- vellum/workflows/workflows/base.py +26 -12
- {vellum_ai-1.7.4.dist-info → vellum_ai-1.7.6.dist-info}/METADATA +1 -1
- {vellum_ai-1.7.4.dist-info → vellum_ai-1.7.6.dist-info}/RECORD +48 -38
- vellum_cli/push.py +1 -5
- vellum_cli/tests/test_push.py +86 -0
- vellum_ee/assets/node-definitions.json +1 -1
- vellum_ee/workflows/display/base.py +26 -1
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py +1 -1
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py +1 -1
- vellum_ee/workflows/display/tests/workflow_serialization/test_manual_trigger_serialization.py +110 -0
- vellum_ee/workflows/display/workflows/base_workflow_display.py +72 -10
- vellum_ee/workflows/tests/test_server.py +40 -1
- {vellum_ai-1.7.4.dist-info → vellum_ai-1.7.6.dist-info}/LICENSE +0 -0
- {vellum_ai-1.7.4.dist-info → vellum_ai-1.7.6.dist-info}/WHEEL +0 -0
- {vellum_ai-1.7.4.dist-info → vellum_ai-1.7.6.dist-info}/entry_points.txt +0 -0
--- a/vellum/workflows/runner/runner.py
+++ b/vellum/workflows/runner/runner.py
@@ -74,6 +74,7 @@ from vellum.workflows.references import ExternalInputReference, OutputReference
 from vellum.workflows.references.state_value import StateValueReference
 from vellum.workflows.state.base import BaseState
 from vellum.workflows.state.delta import StateDelta
+from vellum.workflows.types.core import CancelSignal
 from vellum.workflows.types.generics import InputsType, OutputsType, StateType
 
 if TYPE_CHECKING:
@@ -103,7 +104,7 @@ class WorkflowRunner(Generic[StateType]):
         entrypoint_nodes: Optional[RunFromNodeArg] = None,
         external_inputs: Optional[ExternalInputsArg] = None,
         previous_execution_id: Optional[Union[str, UUID]] = None,
-        cancel_signal: Optional[
+        cancel_signal: Optional[CancelSignal] = None,
         node_output_mocks: Optional[MockNodeExecutionArg] = None,
         max_concurrency: Optional[int] = None,
         init_execution_context: Optional[ExecutionContext] = None,
@@ -465,45 +466,11 @@ class WorkflowRunner(Generic[StateType]):
                 parent=execution.parent_context,
             )
         except NodeException as e:
-
-            captured_stacktrace = traceback.format_exc()
-
-            yield NodeExecutionRejectedEvent(
-                trace_id=execution.trace_id,
-                span_id=span_id,
-                body=NodeExecutionRejectedBody(
-                    node_definition=node.__class__,
-                    error=e.error,
-                    stacktrace=captured_stacktrace,
-                ),
-                parent=execution.parent_context,
-            )
+            yield self._handle_run_node_exception(e, "Node Exception", execution, span_id, node)
         except WorkflowInitializationException as e:
-
-            captured_stacktrace = traceback.format_exc()
-            yield NodeExecutionRejectedEvent(
-                trace_id=execution.trace_id,
-                span_id=span_id,
-                body=NodeExecutionRejectedBody(
-                    node_definition=node.__class__,
-                    error=e.error,
-                    stacktrace=captured_stacktrace,
-                ),
-                parent=execution.parent_context,
-            )
+            yield self._handle_run_node_exception(e, "Workflow Initialization Exception", execution, span_id, node)
         except InvalidExpressionException as e:
-
-            captured_stacktrace = traceback.format_exc()
-            yield NodeExecutionRejectedEvent(
-                trace_id=execution.trace_id,
-                span_id=span_id,
-                body=NodeExecutionRejectedBody(
-                    node_definition=node.__class__,
-                    error=e.error,
-                    stacktrace=captured_stacktrace,
-                ),
-                parent=execution.parent_context,
-            )
+            yield self._handle_run_node_exception(e, "Invalid Expression Exception", execution, span_id, node)
         except Exception as e:
             error_message = self._parse_error_message(e)
             if error_message is None:
@@ -528,6 +495,28 @@ class WorkflowRunner(Generic[StateType]):
 
         logger.debug(f"Finished running node: {node.__class__.__name__}")
 
+    def _handle_run_node_exception(
+        self,
+        exception: Union[NodeException, WorkflowInitializationException, InvalidExpressionException],
+        prefix: str,
+        execution: ExecutionContext,
+        span_id: UUID,
+        node: BaseNode[StateType],
+    ) -> NodeExecutionRejectedEvent:
+        logger.info(f"{prefix}: {exception}")
+        captured_stacktrace = traceback.format_exc()
+
+        return NodeExecutionRejectedEvent(
+            trace_id=execution.trace_id,
+            span_id=span_id,
+            body=NodeExecutionRejectedBody(
+                node_definition=node.__class__,
+                error=exception.error,
+                stacktrace=captured_stacktrace,
+            ),
+            parent=execution.parent_context,
+        )
+
     def _parse_error_message(self, exception: Exception) -> Optional[str]:
         try:
             _, _, tb = sys.exc_info()
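The three duplicated `except` blocks above now delegate to this single `_handle_run_node_exception` helper, which logs the failure and builds the `NodeExecutionRejectedEvent` to yield. Below is a minimal sketch of how a caller might observe such a rejection; the workflow and node are hypothetical, and the `NodeException` constructor arguments, the `"node.execution.rejected"` event name, and the stream's default event filtering are assumptions rather than anything this diff guarantees.

```python
from vellum.workflows import BaseWorkflow
from vellum.workflows.errors import WorkflowErrorCode
from vellum.workflows.exceptions import NodeException
from vellum.workflows.nodes.bases.base import BaseNode


class FailingNode(BaseNode):
    def run(self) -> BaseNode.Outputs:
        # Assumed constructor signature for NodeException.
        raise NodeException(message="boom", code=WorkflowErrorCode.INVALID_INPUTS)


class MyWorkflow(BaseWorkflow):
    graph = FailingNode


# Node-level events may require a broader event filter, depending on stream defaults.
for event in MyWorkflow().stream():
    if event.name == "node.execution.rejected":  # assumed event name
        # error and stacktrace come from NodeExecutionRejectedBody, built above
        print(event.body.error, event.body.stacktrace)
```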
@@ -799,13 +788,14 @@ class WorkflowRunner(Generic[StateType]):
             parent=self._execution_context.parent_context,
         )
 
-    def _fulfill_workflow_event(self, outputs: OutputsType) -> WorkflowExecutionFulfilledEvent:
+    def _fulfill_workflow_event(self, outputs: OutputsType, final_state: StateType) -> WorkflowExecutionFulfilledEvent:
         return WorkflowExecutionFulfilledEvent(
             trace_id=self._execution_context.trace_id,
             span_id=self._initial_state.meta.span_id,
             body=WorkflowExecutionFulfilledBody(
                 workflow_definition=self.workflow.__class__,
                 outputs=outputs,
+                final_state=final_state,
             ),
             parent=self._execution_context.parent_context,
         )
@@ -961,7 +951,7 @@ class WorkflowRunner(Generic[StateType]):
                 descriptor.instance.resolve(final_state),
             )
 
-        self._workflow_event_outer_queue.put(self._fulfill_workflow_event(fulfilled_outputs))
+        self._workflow_event_outer_queue.put(self._fulfill_workflow_event(fulfilled_outputs, final_state))
 
     def _run_background_thread(self) -> None:
         state_class = self.workflow.get_state_class()
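`_fulfill_workflow_event` now threads the resolved final state into `WorkflowExecutionFulfilledBody`, so the terminal fulfilled event carries `final_state` alongside `outputs`. A minimal sketch of reading it back; the workflow below is hypothetical and the `"workflow.execution.fulfilled"` event name is an assumption, while `body.outputs` and `body.final_state` follow the fields shown in this hunk.

```python
from vellum.workflows import BaseWorkflow
from vellum.workflows.nodes.bases.base import BaseNode


class GreetNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        greeting: str

    def run(self) -> Outputs:
        return self.Outputs(greeting="hello")


class MyWorkflow(BaseWorkflow):
    graph = GreetNode

    class Outputs(BaseWorkflow.Outputs):
        greeting = GreetNode.Outputs.greeting


terminal_event = MyWorkflow().run()
if terminal_event.name == "workflow.execution.fulfilled":  # assumed event name
    print(terminal_event.body.outputs)      # resolved workflow outputs, as before
    print(terminal_event.body.final_state)  # final state snapshot, new in this release
```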
--- /dev/null
+++ b/vellum/workflows/triggers/base.py
@@ -0,0 +1,125 @@
+from abc import ABC, ABCMeta
+from typing import TYPE_CHECKING, Any, Type, cast
+
+if TYPE_CHECKING:
+    from vellum.workflows.graph.graph import Graph, GraphTarget
+
+
+class BaseTriggerMeta(ABCMeta):
+    """
+    Metaclass for BaseTrigger that enables class-level >> operator.
+
+    This allows triggers to be used at the class level, similar to nodes:
+        ManualTrigger >> MyNode  # Class-level, no instantiation
+    """
+
+    def __rshift__(cls, other: "GraphTarget") -> "Graph":  # type: ignore[misc]
+        """
+        Enable Trigger class >> Node syntax (class-level only).
+
+        Args:
+            other: The target to connect to - can be a Node, Graph, or set of Nodes
+
+        Returns:
+            Graph: A graph object with trigger edges
+
+        Examples:
+            ManualTrigger >> MyNode
+            ManualTrigger >> {NodeA, NodeB}
+            ManualTrigger >> (NodeA >> NodeB)
+        """
+        from vellum.workflows.edges.trigger_edge import TriggerEdge
+        from vellum.workflows.graph.graph import Graph
+        from vellum.workflows.nodes.bases.base import BaseNode as BaseNodeClass
+
+        # Cast cls to the proper type for TriggerEdge
+        trigger_cls = cast("Type[BaseTrigger]", cls)
+
+        if isinstance(other, set):
+            # Trigger >> {NodeA, NodeB}
+            trigger_edges = []
+            graph_items = []
+            for item in other:
+                if isinstance(item, type) and issubclass(item, BaseNodeClass):
+                    trigger_edges.append(TriggerEdge(trigger_cls, item))
+                elif isinstance(item, Graph):
+                    # Trigger >> {Graph1, Graph2}
+                    graph_items.append(item)
+                    for entrypoint in item.entrypoints:
+                        trigger_edges.append(TriggerEdge(trigger_cls, entrypoint))
+                else:
+                    raise TypeError(
+                        f"Cannot connect trigger to {type(item).__name__}. " f"Expected BaseNode or Graph in set."
+                    )
+
+            result_graph = Graph.from_trigger_edges(trigger_edges)
+
+            for graph_item in graph_items:
+                result_graph._extend_edges(graph_item.edges)
+                result_graph._terminals.update(graph_item._terminals)
+                for existing_trigger_edge in graph_item._trigger_edges:
+                    if existing_trigger_edge not in result_graph._trigger_edges:
+                        result_graph._trigger_edges.append(existing_trigger_edge)
+
+            return result_graph
+
+        elif isinstance(other, Graph):
+            # Trigger >> Graph
+            edges = [TriggerEdge(trigger_cls, entrypoint) for entrypoint in other.entrypoints]
+            result_graph = Graph.from_trigger_edges(edges)
+            # Also include the edges from the original graph
+            result_graph._extend_edges(other.edges)
+            result_graph._terminals = other._terminals
+            return result_graph
+
+        elif isinstance(other, type) and issubclass(other, BaseNodeClass):
+            # Trigger >> Node
+            edge = TriggerEdge(trigger_cls, other)
+            return Graph.from_trigger_edge(edge)
+
+        else:
+            raise TypeError(
+                f"Cannot connect trigger to {type(other).__name__}. " f"Expected BaseNode, Graph, or set of BaseNodes."
+            )
+
+    def __rrshift__(cls, other: Any) -> "Graph":
+        """
+        Prevent Node >> Trigger class syntax.
+
+        Raises:
+            TypeError: Always, as this operation is not allowed
+        """
+        raise TypeError(
+            f"Cannot create edge targeting trigger {cls.__name__}. "
+            f"Triggers must be at the start of a graph path, not as targets. "
+            f"Did you mean: {cls.__name__} >> {other.__name__ if hasattr(other, '__name__') else other}?"
+        )
+
+
+class BaseTrigger(ABC, metaclass=BaseTriggerMeta):
+    """
+    Base class for workflow triggers - first-class graph elements.
+
+    Triggers define how and when a workflow execution is initiated. They are integrated
+    into the workflow graph using the >> operator and can connect to nodes at the class level.
+
+    Examples:
+        # Class-level usage (consistent with nodes)
+        ManualTrigger >> MyNode
+        ManualTrigger >> {NodeA, NodeB}
+        ManualTrigger >> (NodeA >> NodeB)
+
+    Subclass Hierarchy:
+        - ManualTrigger: Explicit workflow invocation (default)
+        - IntegrationTrigger: External service triggers (base for Slack, GitHub, etc.)
+        - ScheduledTrigger: Time-based triggers with cron/interval schedules
+
+    Important:
+        Triggers can only appear at the start of graph paths. Attempting to create
+        edges targeting triggers (Node >> Trigger) will raise a TypeError.
+
+    Note:
+        Like nodes, triggers work at the class level only. Do not instantiate triggers.
+    """
+
+    pass
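A minimal sketch of the class-level operator in isolation, mirroring the docstring examples above; `NodeA`/`NodeB` are hypothetical, and the `trigger_edges`, `to_node`, and `trigger_class` accessors follow the tests added later in this diff.

```python
from vellum.workflows.nodes.bases.base import BaseNode
from vellum.workflows.triggers.manual import ManualTrigger


class NodeA(BaseNode):
    pass


class NodeB(BaseNode):
    pass


# Fan out from the trigger class to multiple entrypoints -- no instantiation.
graph = ManualTrigger >> {NodeA, NodeB}

assert {edge.to_node for edge in graph.trigger_edges} == {NodeA, NodeB}
assert all(edge.trigger_class is ManualTrigger for edge in graph.trigger_edges)
```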
--- /dev/null
+++ b/vellum/workflows/triggers/integration.py
@@ -0,0 +1,62 @@
+from abc import ABC
+from typing import ClassVar, Optional
+
+from vellum.workflows.outputs.base import BaseOutputs
+from vellum.workflows.triggers.base import BaseTrigger
+
+
+class IntegrationTrigger(BaseTrigger, ABC):
+    """
+    Base class for integration-based triggers (Slack, Email, etc.).
+
+    Integration triggers:
+    - Are initiated by external events (webhooks, API calls)
+    - Produce outputs that downstream nodes can reference
+    - Require configuration (auth, webhooks, etc.)
+
+    Examples:
+        # Define an integration trigger
+        class MyIntegrationTrigger(IntegrationTrigger):
+            class Outputs(IntegrationTrigger.Outputs):
+                data: str
+
+            @classmethod
+            def process_event(cls, event_data: dict):
+                return cls.Outputs(data=event_data.get("data", ""))
+
+        # Use in workflow
+        class MyWorkflow(BaseWorkflow):
+            graph = MyIntegrationTrigger >> ProcessNode
+
+    Note:
+        Unlike ManualTrigger, integration triggers provide structured outputs
+        that downstream nodes can reference directly via Outputs.
+    """
+
+    class Outputs(BaseOutputs):
+        """Base outputs for integration triggers."""
+
+        pass
+
+    # Configuration that can be set at runtime
+    config: ClassVar[Optional[dict]] = None
+
+    @classmethod
+    def process_event(cls, event_data: dict) -> "IntegrationTrigger.Outputs":
+        """
+        Process incoming webhook/event data and return trigger outputs.
+
+        This method should be implemented by subclasses to parse external
+        event payloads (e.g., Slack webhooks, email notifications) into
+        structured trigger outputs.
+
+        Args:
+            event_data: Raw event data from the external system
+
+        Returns:
+            Trigger outputs containing parsed event data
+
+        Raises:
+            NotImplementedError: If subclass doesn't implement this method
+        """
+        raise NotImplementedError(f"{cls.__name__} must implement process_event() method to handle external events")
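A minimal sketch of a concrete subclass, following the docstring example above; `SlackMessageTrigger` and the payload shape are hypothetical.

```python
from vellum.workflows.triggers.integration import IntegrationTrigger


class SlackMessageTrigger(IntegrationTrigger):
    class Outputs(IntegrationTrigger.Outputs):
        message: str
        channel: str

    @classmethod
    def process_event(cls, event_data: dict) -> "SlackMessageTrigger.Outputs":
        # Parse an assumed Slack-style webhook payload into structured outputs.
        return cls.Outputs(
            message=event_data.get("text", ""),
            channel=event_data.get("channel", ""),
        )


# Downstream nodes could then reference SlackMessageTrigger.Outputs.message.
outputs = SlackMessageTrigger.process_event({"text": "hello", "channel": "#general"})
assert outputs.message == "hello"
```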
--- /dev/null
+++ b/vellum/workflows/triggers/manual.py
@@ -0,0 +1,37 @@
+from vellum.workflows.triggers.base import BaseTrigger
+
+
+class ManualTrigger(BaseTrigger):
+    """
+    Default trigger representing explicit workflow invocation.
+
+    ManualTrigger is used when workflows are explicitly invoked via:
+    - workflow.run() method calls
+    - workflow.stream() method calls
+    - API calls to execute the workflow
+
+    This is the default trigger for all workflows. When no trigger is specified
+    in a workflow's graph definition, ManualTrigger is implicitly added.
+
+    Examples:
+        # Explicit ManualTrigger (equivalent to implicit)
+        class MyWorkflow(BaseWorkflow):
+            graph = ManualTrigger >> MyNode
+
+        # Implicit ManualTrigger (normalized to above)
+        class MyWorkflow(BaseWorkflow):
+            graph = MyNode
+
+    Characteristics:
+    - Provides no trigger-specific inputs
+    - Always ready to execute when invoked
+    - Simplest trigger type with no configuration
+    - Default behavior for backward compatibility
+
+    Comparison with other triggers:
+    - IntegrationTrigger: Responds to external events (webhooks, API calls)
+    - ScheduledTrigger: Executes based on time/schedule configuration
+    - ManualTrigger: Executes when explicitly called
+    """
+
+    pass
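A minimal sketch of the explicit and implicit forms described in the docstring; `MyNode` and both workflow classes are hypothetical.

```python
from vellum.workflows import BaseWorkflow
from vellum.workflows.nodes.bases.base import BaseNode
from vellum.workflows.triggers.manual import ManualTrigger


class MyNode(BaseNode):
    pass


class ExplicitWorkflow(BaseWorkflow):
    graph = ManualTrigger >> MyNode


class ImplicitWorkflow(BaseWorkflow):
    # No trigger specified: treated as ManualTrigger >> MyNode per the docstring.
    graph = MyNode
```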
--- /dev/null
+++ b/vellum/workflows/triggers/tests/__init__.py
@@ -0,0 +1 @@
+# Tests for workflow triggers
--- /dev/null
+++ b/vellum/workflows/triggers/tests/test_integration.py
@@ -0,0 +1,102 @@
+"""Tests for IntegrationTrigger base class."""
+
+import pytest
+
+from vellum.workflows.nodes.bases.base import BaseNode
+from vellum.workflows.triggers.integration import IntegrationTrigger
+
+
+def test_integration_trigger__is_abstract():
+    """IntegrationTrigger cannot be instantiated directly (ABC)."""
+    # WHEN we try to call process_event on IntegrationTrigger directly
+    # THEN it raises NotImplementedError
+    with pytest.raises(NotImplementedError, match="must implement process_event"):
+        IntegrationTrigger.process_event({})
+
+
+def test_integration_trigger__outputs_class_exists():
+    """IntegrationTrigger has Outputs class."""
+    # GIVEN IntegrationTrigger
+    # THEN it has an Outputs class
+    assert hasattr(IntegrationTrigger, "Outputs")
+
+
+def test_integration_trigger__can_be_subclassed():
+    """IntegrationTrigger can be subclassed to create concrete triggers."""
+
+    # GIVEN a concrete implementation of IntegrationTrigger
+    class TestTrigger(IntegrationTrigger):
+        class Outputs(IntegrationTrigger.Outputs):
+            data: str
+
+        @classmethod
+        def process_event(cls, event_data: dict):
+            return cls.Outputs(data=event_data.get("data", ""))
+
+    # WHEN we process an event
+    result = TestTrigger.process_event({"data": "test"})
+
+    # THEN it returns the expected outputs
+    assert result.data == "test"
+
+
+def test_integration_trigger__graph_syntax():
+    """IntegrationTrigger can be used in graph syntax."""
+
+    # GIVEN a concrete trigger and a node
+    class TestTrigger(IntegrationTrigger):
+        class Outputs(IntegrationTrigger.Outputs):
+            value: str
+
+        @classmethod
+        def process_event(cls, event_data: dict):
+            return cls.Outputs(value=event_data.get("value", ""))
+
+    class TestNode(BaseNode):
+        pass
+
+    # WHEN we use trigger >> node syntax
+    graph = TestTrigger >> TestNode
+
+    # THEN a graph is created
+    assert graph is not None
+    assert len(list(graph.trigger_edges)) == 1
+    assert list(graph.trigger_edges)[0].trigger_class == TestTrigger
+    assert list(graph.trigger_edges)[0].to_node == TestNode
+
+
+def test_integration_trigger__multiple_entrypoints():
+    """IntegrationTrigger works with multiple entry points."""
+
+    # GIVEN a trigger and multiple nodes
+    class TestTrigger(IntegrationTrigger):
+        class Outputs(IntegrationTrigger.Outputs):
+            msg: str
+
+        @classmethod
+        def process_event(cls, event_data: dict):
+            return cls.Outputs(msg=event_data.get("msg", ""))
+
+    class NodeA(BaseNode):
+        pass
+
+    class NodeB(BaseNode):
+        pass
+
+    # WHEN we use trigger >> {nodes} syntax
+    graph = TestTrigger >> {NodeA, NodeB}
+
+    # THEN both nodes are entrypoints
+    trigger_edges = list(graph.trigger_edges)
+    assert len(trigger_edges) == 2
+    target_nodes = {edge.to_node for edge in trigger_edges}
+    assert target_nodes == {NodeA, NodeB}
+
+
+def test_integration_trigger__config_attribute():
+    """IntegrationTrigger has optional config attribute."""
+
+    # GIVEN IntegrationTrigger
+    # THEN it has a config class variable
+    assert hasattr(IntegrationTrigger, "config")
+    assert IntegrationTrigger.config is None
--- a/vellum/workflows/workflows/base.py
+++ b/vellum/workflows/workflows/base.py
@@ -4,7 +4,6 @@ from functools import lru_cache
 import importlib
 import inspect
 import logging
-from threading import Event as ThreadingEvent
 from uuid import UUID, uuid4
 from typing import (
     Any,
@@ -27,6 +26,8 @@ from typing import (
     overload,
 )
 
+from pydantic import ValidationError
+
 from vellum.workflows.edges import Edge
 from vellum.workflows.emitters.base import BaseWorkflowEmitter
 from vellum.workflows.errors import WorkflowError, WorkflowErrorCode
@@ -76,6 +77,7 @@ from vellum.workflows.runner.runner import ExternalInputsArg, RunFromNodeArg
 from vellum.workflows.state.base import BaseState, StateMeta
 from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.state.store import Store
+from vellum.workflows.types import CancelSignal
 from vellum.workflows.types.generics import InputsType, StateType
 from vellum.workflows.types.utils import get_original_base
 from vellum.workflows.utils.uuids import uuid4_from_hash
@@ -227,12 +229,12 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
     WorkflowEvent = Union[  # type: ignore
         GenericWorkflowEvent,
         WorkflowExecutionInitiatedEvent[InputsType, StateType],  # type: ignore[valid-type]
-        WorkflowExecutionFulfilledEvent[Outputs],
+        WorkflowExecutionFulfilledEvent[Outputs, StateType],  # type: ignore[valid-type]
         WorkflowExecutionSnapshottedEvent[StateType],  # type: ignore[valid-type]
     ]
 
     TerminalWorkflowEvent = Union[
-        WorkflowExecutionFulfilledEvent[Outputs],
+        WorkflowExecutionFulfilledEvent[Outputs, StateType],  # type: ignore[valid-type]
         WorkflowExecutionRejectedEvent,
         WorkflowExecutionPausedEvent,
     ]
@@ -374,7 +376,7 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         entrypoint_nodes: Optional[RunFromNodeArg] = None,
         external_inputs: Optional[ExternalInputsArg] = None,
         previous_execution_id: Optional[Union[str, UUID]] = None,
-        cancel_signal: Optional[
+        cancel_signal: Optional[CancelSignal] = None,
         node_output_mocks: Optional[MockNodeExecutionArg] = None,
         max_concurrency: Optional[int] = None,
     ) -> TerminalWorkflowEvent:
@@ -402,8 +404,8 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         previous_execution_id: Optional[Union[str, UUID]] = None
             The execution ID of the previous execution to resume from.
 
-        cancel_signal: Optional[
-            A
+        cancel_signal: Optional[CancelSignal] = None
+            A cancel signal that can be used to cancel the Workflow Execution.
 
         node_output_mocks: Optional[MockNodeExecutionArg] = None
             A list of Outputs to mock for Nodes during Workflow Execution. Each mock can include a `when_condition`
@@ -493,7 +495,7 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         entrypoint_nodes: Optional[RunFromNodeArg] = None,
         external_inputs: Optional[ExternalInputsArg] = None,
         previous_execution_id: Optional[Union[str, UUID]] = None,
-        cancel_signal: Optional[
+        cancel_signal: Optional[CancelSignal] = None,
         node_output_mocks: Optional[MockNodeExecutionArg] = None,
         max_concurrency: Optional[int] = None,
     ) -> WorkflowEventStream:
@@ -522,8 +524,8 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         previous_execution_id: Optional[Union[str, UUID]] = None
            The execution ID of the previous execution to resume from.
 
-        cancel_signal: Optional[
-            A
+        cancel_signal: Optional[CancelSignal] = None
+            A cancel signal that can be used to cancel the Workflow Execution.
 
         node_output_mocks: Optional[MockNodeExecutionArg] = None
            A list of Outputs to mock for Nodes during Workflow Execution. Each mock can include a `when_condition`
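With `cancel_signal` now typed as `Optional[CancelSignal]`, callers pass the signal into `run()` or `stream()` and set it to request cancellation. A minimal sketch; the workflow is hypothetical, and `CancelSignal` is assumed to be compatible with `threading.Event`, the import this diff removes from `base.py`.

```python
import threading

from vellum.workflows import BaseWorkflow
from vellum.workflows.nodes.bases.base import BaseNode


class SlowNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        value: str

    def run(self) -> Outputs:
        return self.Outputs(value="done")


class MyWorkflow(BaseWorkflow):
    graph = SlowNode

    class Outputs(BaseWorkflow.Outputs):
        value = SlowNode.Outputs.value


cancel_signal = threading.Event()  # assumed to satisfy CancelSignal

# Setting the signal from another thread (cancel_signal.set()) asks the
# runner to cancel the in-flight execution.
terminal_event = MyWorkflow().run(cancel_signal=cancel_signal)
```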
@@ -684,6 +686,10 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
         workflow_path = f"{module_path}.workflow"
         try:
             module = importlib.import_module(workflow_path)
+        except ValidationError as e:
+            raise WorkflowInitializationException(
+                message=f"Pydantic Model Validation defined in Workflow Failed: {e}"
+            ) from e
         except TypeError as e:
             if "Unexpected graph type" in str(e) or "unhashable type: 'set'" in str(e):
                 raise WorkflowInitializationException(
@@ -691,9 +697,17 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
                     "Please contact Vellum support for assistance with Workflow configuration."
                 ) from e
             else:
-                raise
-        except
-            raise WorkflowInitializationException(message=f"
+                raise WorkflowInitializationException(message=f"Type Error raised while loading Workflow: {e}") from e
+        except SyntaxError as e:
+            raise WorkflowInitializationException(message=f"Syntax Error raised while loading Workflow: {e}") from e
+        except ModuleNotFoundError as e:
+            raise WorkflowInitializationException(message=f"Workflow module not found: {e}") from e
+        except ImportError as e:
+            raise WorkflowInitializationException(message=f"Invalid import found while loading Workflow: {e}") from e
+        except NameError as e:
+            raise WorkflowInitializationException(message=f"Invalid variable reference: {e}") from e
+        except Exception as e:
+            raise WorkflowInitializationException(message=f"Unexpected failure while loading module: {e}") from e
         workflows: List[Type[BaseWorkflow]] = []
         for name in dir(module):
             if name.startswith("__"):