vellum-ai 0.10.9__py3-none-any.whl → 0.11.0__py3-none-any.whl
Sign up to get free protection for your applications and to get access to all the features.
- vellum/client/core/client_wrapper.py +1 -1
- vellum/evaluations/resources.py +7 -12
- vellum/evaluations/utils/env.py +1 -3
- vellum/evaluations/utils/paginator.py +0 -1
- vellum/evaluations/utils/typing.py +1 -1
- vellum/evaluations/utils/uuid.py +1 -1
- vellum/plugins/vellum_mypy.py +3 -1
- vellum/workflows/events/node.py +7 -6
- vellum/workflows/events/tests/test_event.py +0 -1
- vellum/workflows/events/types.py +0 -1
- vellum/workflows/events/workflow.py +19 -1
- vellum/workflows/nodes/bases/base.py +17 -56
- vellum/workflows/nodes/bases/tests/test_base_node.py +0 -1
- vellum/workflows/nodes/core/templating_node/node.py +1 -0
- vellum/workflows/nodes/core/try_node/node.py +2 -2
- vellum/workflows/nodes/core/try_node/tests/test_node.py +1 -3
- vellum/workflows/nodes/displayable/bases/api_node/node.py +1 -1
- vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +0 -1
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +0 -1
- vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py +2 -1
- vellum/workflows/nodes/displayable/bases/search_node.py +0 -1
- vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py +0 -1
- vellum/workflows/nodes/displayable/code_execution_node/utils.py +3 -2
- vellum/workflows/nodes/displayable/conditional_node/node.py +1 -1
- vellum/workflows/nodes/displayable/guardrail_node/node.py +0 -1
- vellum/workflows/nodes/displayable/inline_prompt_node/node.py +1 -0
- vellum/workflows/nodes/displayable/prompt_deployment_node/node.py +3 -1
- vellum/workflows/nodes/displayable/search_node/node.py +1 -0
- vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +3 -2
- vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py +10 -7
- vellum/workflows/nodes/displayable/tests/test_search_node_wth_text_output.py +0 -1
- vellum/workflows/outputs/base.py +2 -4
- vellum/workflows/ports/node_ports.py +1 -1
- vellum/workflows/runner/runner.py +152 -191
- vellum/workflows/state/base.py +0 -2
- vellum/workflows/types/core.py +1 -0
- vellum/workflows/types/tests/test_utils.py +1 -0
- vellum/workflows/types/utils.py +0 -1
- vellum/workflows/utils/functions.py +74 -0
- vellum/workflows/utils/tests/test_functions.py +171 -0
- vellum/workflows/utils/tests/test_vellum_variables.py +0 -1
- vellum/workflows/utils/vellum_variables.py +2 -2
- vellum/workflows/workflows/base.py +74 -34
- vellum/workflows/workflows/event_filters.py +4 -12
- {vellum_ai-0.10.9.dist-info → vellum_ai-0.11.0.dist-info}/METADATA +1 -1
- {vellum_ai-0.10.9.dist-info → vellum_ai-0.11.0.dist-info}/RECORD +96 -90
- vellum_cli/__init__.py +147 -13
- vellum_cli/config.py +0 -1
- vellum_cli/image_push.py +1 -1
- vellum_cli/pull.py +29 -19
- vellum_cli/push.py +9 -10
- vellum_cli/tests/__init__.py +0 -0
- vellum_cli/tests/conftest.py +40 -0
- vellum_cli/tests/test_main.py +11 -0
- vellum_cli/tests/test_pull.py +125 -71
- vellum_cli/tests/test_push.py +173 -0
- vellum_ee/workflows/display/nodes/base_node_display.py +3 -2
- vellum_ee/workflows/display/nodes/base_node_vellum_display.py +2 -2
- vellum_ee/workflows/display/nodes/get_node_display_class.py +1 -1
- vellum_ee/workflows/display/nodes/tests/test_base_node_display.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/__init__.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/api_node.py +4 -7
- vellum_ee/workflows/display/nodes/vellum/conditional_node.py +39 -22
- vellum_ee/workflows/display/nodes/vellum/error_node.py +3 -3
- vellum_ee/workflows/display/nodes/vellum/final_output_node.py +0 -2
- vellum_ee/workflows/display/nodes/vellum/guardrail_node.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py +4 -2
- vellum_ee/workflows/display/nodes/vellum/map_node.py +11 -5
- vellum_ee/workflows/display/nodes/vellum/merge_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/note_node.py +1 -3
- vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/search_node.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/templating_node.py +1 -1
- vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py +5 -5
- vellum_ee/workflows/display/nodes/vellum/utils.py +4 -4
- vellum_ee/workflows/display/tests/test_vellum_workflow_display.py +45 -0
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_api_node_serialization.py +13 -24
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_node_serialization.py +13 -39
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_guardrail_node_serialization.py +2 -2
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_subworkflow_serialization.py +62 -58
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_map_node_serialization.py +25 -4
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_merge_node_serialization.py +2 -1
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_prompt_deployment_serialization.py +2 -2
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_subworkflow_deployment_serialization.py +2 -2
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_terminal_node_serialization.py +1 -1
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_try_node_serialization.py +2 -1
- vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_node_serialization.py +2 -2
- vellum_ee/workflows/display/types.py +4 -4
- vellum_ee/workflows/display/utils/vellum.py +2 -6
- vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py +4 -1
- vellum_ee/workflows/display/workflows/vellum_workflow_display.py +6 -2
- vellum/workflows/runner/types.py +0 -16
- {vellum_ai-0.10.9.dist-info → vellum_ai-0.11.0.dist-info}/LICENSE +0 -0
- {vellum_ai-0.10.9.dist-info → vellum_ai-0.11.0.dist-info}/WHEEL +0 -0
- {vellum_ai-0.10.9.dist-info → vellum_ai-0.11.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,74 @@
|
|
1
|
+
import dataclasses
|
2
|
+
import inspect
|
3
|
+
from typing import Any, Callable, Union, get_args, get_origin
|
4
|
+
|
5
|
+
from vellum.client.types.function_definition import FunctionDefinition
|
6
|
+
|
7
|
+
# Maps primitive Python types to their JSON-schema type names.
type_map = {
    str: "string",
    int: "integer",
    float: "number",
    bool: "boolean",
    list: "array",
    dict: "object",
    None: "null",
    type(None): "null",
}


def _compile_annotation(annotation: Any, defs: dict[str, Any]) -> dict:
    """
    Compile a single type annotation into a JSON-schema fragment.

    Handles, in order: ``Union`` (as ``anyOf``), parameterized ``dict``
    (as ``additionalProperties``), parameterized ``list`` (as ``items``),
    dataclasses (registered once in ``defs`` and referenced via ``$ref``),
    and finally primitives via ``type_map``.

    Parameters
    ----------
    annotation: Any
        The annotation to compile. Unsupported annotations (anything not
        covered above, including a missing annotation) raise ``KeyError``
        from the final ``type_map`` lookup.
    defs: dict[str, Any]
        Accumulator for shared ``$defs`` schemas, mutated in place so
        that each dataclass is compiled only once per compilation.
    """
    if get_origin(annotation) is Union:
        return {"anyOf": [_compile_annotation(a, defs) for a in get_args(annotation)]}

    if get_origin(annotation) is dict:
        _, value_type = get_args(annotation)
        return {"type": "object", "additionalProperties": _compile_annotation(value_type, defs)}

    if get_origin(annotation) is list:
        item_type = get_args(annotation)[0]
        return {"type": "array", "items": _compile_annotation(item_type, defs)}

    if dataclasses.is_dataclass(annotation):
        if annotation.__name__ not in defs:
            properties = {}
            required = []
            for field in dataclasses.fields(annotation):
                properties[field.name] = _compile_annotation(field.type, defs)
                if field.default is not dataclasses.MISSING:
                    properties[field.name]["default"] = field.default
                elif field.default_factory is not dataclasses.MISSING:
                    # Fields with a default_factory are optional too; the
                    # factory result cannot be serialized statically, so we
                    # simply omit both "default" and the "required" entry.
                    # (Previously these were incorrectly marked required.)
                    pass
                else:
                    required.append(field.name)
            defs[annotation.__name__] = {"type": "object", "properties": properties, "required": required}
        return {"$ref": f"#/$defs/{annotation.__name__}"}

    return {"type": type_map[annotation]}
|
45
|
+
|
46
|
+
|
47
|
+
def compile_function_definition(function: Callable) -> FunctionDefinition:
    """
    Converts a Python function into our Vellum-native FunctionDefinition type.

    The function's signature is introspected to build a JSON-schema-style
    ``parameters`` object: each parameter's annotation is compiled into a
    schema fragment, parameters without defaults are listed under
    ``required``, and any dataclass schemas encountered are collected under
    ``$defs``.

    Parameters
    ----------
    function: Callable
        The function whose signature should be compiled.

    Raises
    ------
    ValueError
        If the function's signature cannot be introspected (e.g. some
        callables implemented in C).
    """
    try:
        signature = inspect.signature(function)
    except ValueError as e:
        # Chain the original exception so the underlying cause stays
        # visible in tracebacks (previously it was discarded).
        raise ValueError(f"Failed to get signature for function {function.__name__}: {str(e)}") from e

    properties = {}
    required = []
    defs: dict[str, Any] = {}
    for param in signature.parameters.values():
        properties[param.name] = _compile_annotation(param.annotation, defs)
        if param.default is inspect.Parameter.empty:
            required.append(param.name)
        else:
            properties[param.name]["default"] = param.default

    parameters = {"type": "object", "properties": properties, "required": required}
    if defs:
        # Shared schemas (e.g. dataclasses) referenced via "$ref".
        parameters["$defs"] = defs

    return FunctionDefinition(
        name=function.__name__,
        parameters=parameters,
    )
|
@@ -0,0 +1,171 @@
|
|
1
|
+
from dataclasses import dataclass
|
2
|
+
from typing import Dict, List, Optional, Union
|
3
|
+
|
4
|
+
from vellum.client.types.function_definition import FunctionDefinition
|
5
|
+
from vellum.workflows.utils.functions import compile_function_definition
|
6
|
+
|
7
|
+
|
8
|
+
def test_compile_function_definition__just_name():
    """A zero-argument function compiles to an empty parameters schema."""

    # GIVEN a function that takes no arguments at all
    def my_function():
        pass

    # WHEN the function is compiled
    actual = compile_function_definition(my_function)

    # THEN only the name and an empty object schema come back
    expected = FunctionDefinition(
        name="my_function",
        parameters={"type": "object", "properties": {}, "required": []},
    )
    assert actual == expected
|
21
|
+
|
22
|
+
|
23
|
+
def test_compile_function_definition__all_args():
    """Every primitive annotation maps to its JSON-schema type name."""

    # GIVEN a function annotated with each of the base types
    def my_function(a: str, b: int, c: float, d: bool, e: list, f: dict):
        pass

    # WHEN the function is compiled
    actual = compile_function_definition(my_function)

    # THEN each parameter carries the matching schema type, and all are required
    expected_properties = {
        "a": {"type": "string"},
        "b": {"type": "integer"},
        "c": {"type": "number"},
        "d": {"type": "boolean"},
        "e": {"type": "array"},
        "f": {"type": "object"},
    }
    assert actual == FunctionDefinition(
        name="my_function",
        parameters={
            "type": "object",
            "properties": expected_properties,
            "required": ["a", "b", "c", "d", "e", "f"],
        },
    )
|
47
|
+
|
48
|
+
|
49
|
+
def test_compile_function_definition__unions():
    """A Union annotation compiles to a JSON-schema ``anyOf``."""

    # GIVEN a function whose only arg is a union of two primitives
    def my_function(a: Union[str, int]):
        pass

    # WHEN the function is compiled
    actual = compile_function_definition(my_function)

    # THEN the union becomes an anyOf of the member schemas
    assert actual == FunctionDefinition(
        name="my_function",
        parameters={
            "type": "object",
            "properties": {
                "a": {"anyOf": [{"type": "string"}, {"type": "integer"}]},
            },
            "required": ["a"],
        },
    )
|
68
|
+
|
69
|
+
|
70
|
+
def test_compile_function_definition__optionals():
    """Optional annotations and default values compile as expected."""

    # GIVEN a function covering every way to express an optional parameter
    def my_function(
        a: str,
        b: Optional[str],
        c: None,
        d: str = "hello",
        e: Optional[str] = None,
    ):
        pass

    # WHEN the function is compiled
    actual = compile_function_definition(my_function)

    # THEN Optional[...] becomes anyOf-with-null, defaults are recorded,
    # and only the parameters without defaults are required
    expected_properties = {
        "a": {"type": "string"},
        "b": {"anyOf": [{"type": "string"}, {"type": "null"}]},
        "c": {"type": "null"},
        "d": {"type": "string", "default": "hello"},
        "e": {"anyOf": [{"type": "string"}, {"type": "null"}], "default": None},
    }
    assert actual == FunctionDefinition(
        name="my_function",
        parameters={
            "type": "object",
            "properties": expected_properties,
            "required": ["a", "b", "c"],
        },
    )
|
99
|
+
|
100
|
+
|
101
|
+
def test_compile_function_definition__parameterized_dicts():
    """A Dict[K, V] annotation compiles the value type into additionalProperties."""

    # GIVEN a function whose arg is a parameterized dict
    def my_function(a: Dict[str, int]):
        pass

    # WHEN the function is compiled
    actual = compile_function_definition(my_function)

    # THEN the value type shows up under additionalProperties
    assert actual == FunctionDefinition(
        name="my_function",
        parameters={
            "type": "object",
            "properties": {
                "a": {"type": "object", "additionalProperties": {"type": "integer"}},
            },
            "required": ["a"],
        },
    )
|
120
|
+
|
121
|
+
|
122
|
+
def test_compile_function_definition__parameterized_lists():
    """A List[T] annotation compiles the element type into ``items``."""

    # GIVEN a function whose arg is a parameterized list
    def my_function(a: List[int]):
        pass

    # WHEN the function is compiled
    actual = compile_function_definition(my_function)

    # THEN the element type shows up under items
    assert actual == FunctionDefinition(
        name="my_function",
        parameters={
            "type": "object",
            "properties": {
                "a": {"type": "array", "items": {"type": "integer"}},
            },
            "required": ["a"],
        },
    )
|
141
|
+
|
142
|
+
|
143
|
+
def test_compile_function_definition__dataclasses():
    """A dataclass annotation is hoisted into ``$defs`` and referenced via ``$ref``."""

    # GIVEN a dataclass used as a parameter annotation
    @dataclass
    class MyDataClass:
        a: int
        b: str

    def my_function(c: MyDataClass):
        pass

    # WHEN the function is compiled
    actual = compile_function_definition(my_function)

    # THEN the parameter references the dataclass schema registered in $defs
    expected_defs = {
        "MyDataClass": {
            "type": "object",
            "properties": {"a": {"type": "integer"}, "b": {"type": "string"}},
            "required": ["a", "b"],
        }
    }
    assert actual == FunctionDefinition(
        name="my_function",
        parameters={
            "type": "object",
            "properties": {"c": {"$ref": "#/$defs/MyDataClass"}},
            "required": ["c"],
            "$defs": expected_defs,
        },
    )
|
@@ -40,8 +40,8 @@ def primitive_type_to_vellum_variable_type(type_: Union[Type, BaseDescriptor]) -
|
|
40
40
|
int,
|
41
41
|
float,
|
42
42
|
str,
|
43
|
-
typing.List[typing.ForwardRef(
|
44
|
-
typing.Dict[str, typing.ForwardRef(
|
43
|
+
typing.List[typing.ForwardRef("Json")], # type: ignore [misc]
|
44
|
+
typing.Dict[str, typing.ForwardRef("Json")], # type: ignore [misc]
|
45
45
|
]:
|
46
46
|
return "JSON"
|
47
47
|
raise ValueError(f"Expected Descriptor to only have one type, got {types}")
|
@@ -4,7 +4,6 @@ import importlib
|
|
4
4
|
import inspect
|
5
5
|
|
6
6
|
from vellum.plugins.utils import load_runtime_plugins
|
7
|
-
from vellum.workflows.events.types import CodeResourceDefinition
|
8
7
|
from vellum.workflows.workflows.event_filters import workflow_event_filter
|
9
8
|
|
10
9
|
load_runtime_plugins()
|
@@ -62,6 +61,8 @@ from vellum.workflows.events.workflow import (
|
|
62
61
|
WorkflowExecutionRejectedEvent,
|
63
62
|
WorkflowExecutionResumedBody,
|
64
63
|
WorkflowExecutionResumedEvent,
|
64
|
+
WorkflowExecutionSnapshottedBody,
|
65
|
+
WorkflowExecutionSnapshottedEvent,
|
65
66
|
WorkflowExecutionStreamingBody,
|
66
67
|
WorkflowExecutionStreamingEvent,
|
67
68
|
)
|
@@ -82,13 +83,16 @@ from vellum.workflows.types.utils import get_original_base
|
|
82
83
|
class _BaseWorkflowMeta(type):
|
83
84
|
def __new__(mcs, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
|
84
85
|
if "graph" not in dct:
|
85
|
-
dct["graph"] =
|
86
|
+
dct["graph"] = set()
|
86
87
|
|
87
88
|
return super().__new__(mcs, name, bases, dct)
|
88
89
|
|
89
90
|
|
91
|
+
GraphAttribute = Union[Type[BaseNode], Graph, Set[Type[BaseNode]], Set[Graph]]
|
92
|
+
|
93
|
+
|
90
94
|
class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkflowMeta):
|
91
|
-
graph: ClassVar[
|
95
|
+
graph: ClassVar[GraphAttribute]
|
92
96
|
emitters: List[BaseWorkflowEmitter]
|
93
97
|
resolvers: List[BaseWorkflowResolver]
|
94
98
|
|
@@ -99,6 +103,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
99
103
|
GenericWorkflowEvent,
|
100
104
|
WorkflowExecutionInitiatedEvent[WorkflowInputsType], # type: ignore[valid-type]
|
101
105
|
WorkflowExecutionFulfilledEvent[Outputs],
|
106
|
+
WorkflowExecutionSnapshottedEvent[StateType], # type: ignore[valid-type]
|
102
107
|
]
|
103
108
|
|
104
109
|
TerminalWorkflowEvent = Union[
|
@@ -118,9 +123,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
118
123
|
):
|
119
124
|
self._parent_state = parent_state
|
120
125
|
self.emitters = emitters or (self.emitters if hasattr(self, "emitters") else [])
|
121
|
-
self.resolvers = resolvers or (
|
122
|
-
self.resolvers if hasattr(self, "resolvers") else []
|
123
|
-
)
|
126
|
+
self.resolvers = resolvers or (self.resolvers if hasattr(self, "resolvers") else [])
|
124
127
|
self._context = context or WorkflowContext()
|
125
128
|
self._store = Store()
|
126
129
|
|
@@ -137,8 +140,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
137
140
|
return [original_graph]
|
138
141
|
if isinstance(original_graph, set):
|
139
142
|
return [
|
140
|
-
subgraph if isinstance(subgraph, Graph) else Graph.from_node(subgraph)
|
141
|
-
for subgraph in original_graph
|
143
|
+
subgraph if isinstance(subgraph, Graph) else Graph.from_node(subgraph) for subgraph in original_graph
|
142
144
|
]
|
143
145
|
if issubclass(original_graph, BaseNode):
|
144
146
|
return [Graph.from_node(original_graph)]
|
@@ -186,6 +188,31 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
186
188
|
external_inputs: Optional[ExternalInputsArg] = None,
|
187
189
|
cancel_signal: Optional[ThreadingEvent] = None,
|
188
190
|
) -> TerminalWorkflowEvent:
|
191
|
+
"""
|
192
|
+
Invoke a Workflow, returning the last event emitted, which should be one of:
|
193
|
+
- `WorkflowExecutionFulfilledEvent` if the Workflow Execution was successful
|
194
|
+
- `WorkflowExecutionRejectedEvent` if the Workflow Execution was rejected
|
195
|
+
- `WorkflowExecutionPausedEvent` if the Workflow Execution was paused
|
196
|
+
|
197
|
+
Parameters
|
198
|
+
----------
|
199
|
+
inputs: Optional[WorkflowInputsType] = None
|
200
|
+
The Inputs instance used to initiate the Workflow Execution.
|
201
|
+
|
202
|
+
state: Optional[StateType] = None
|
203
|
+
The State instance to run the Workflow with. Workflows maintain a global state that can be used to
|
204
|
+
deterministically resume execution from any point.
|
205
|
+
|
206
|
+
entrypoint_nodes: Optional[RunFromNodeArg] = None
|
207
|
+
The entrypoint nodes to run the Workflow with. Useful for resuming execution from a specific node.
|
208
|
+
|
209
|
+
external_inputs: Optional[ExternalInputsArg] = None
|
210
|
+
External inputs to pass to the Workflow. Useful for providing human-in-the-loop behavior to the Workflow.
|
211
|
+
|
212
|
+
cancel_signal: Optional[ThreadingEvent] = None
|
213
|
+
A threading event that can be used to cancel the Workflow Execution.
|
214
|
+
"""
|
215
|
+
|
189
216
|
events = WorkflowRunner(
|
190
217
|
self,
|
191
218
|
inputs=inputs,
|
@@ -195,15 +222,10 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
195
222
|
cancel_signal=cancel_signal,
|
196
223
|
parent_context=self._context.parent_context,
|
197
224
|
).stream()
|
198
|
-
first_event: Optional[
|
199
|
-
Union[WorkflowExecutionInitiatedEvent, WorkflowExecutionResumedEvent]
|
200
|
-
] = None
|
225
|
+
first_event: Optional[Union[WorkflowExecutionInitiatedEvent, WorkflowExecutionResumedEvent]] = None
|
201
226
|
last_event = None
|
202
227
|
for event in events:
|
203
|
-
if
|
204
|
-
event.name == "workflow.execution.initiated"
|
205
|
-
or event.name == "workflow.execution.resumed"
|
206
|
-
):
|
228
|
+
if event.name == "workflow.execution.initiated" or event.name == "workflow.execution.resumed":
|
207
229
|
first_event = event
|
208
230
|
last_event = event
|
209
231
|
|
@@ -254,15 +276,39 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
254
276
|
|
255
277
|
def stream(
|
256
278
|
self,
|
257
|
-
event_filter: Optional[
|
258
|
-
Callable[[Type["BaseWorkflow"], WorkflowEvent], bool]
|
259
|
-
] = None,
|
279
|
+
event_filter: Optional[Callable[[Type["BaseWorkflow"], WorkflowEvent], bool]] = None,
|
260
280
|
inputs: Optional[WorkflowInputsType] = None,
|
261
281
|
state: Optional[StateType] = None,
|
262
282
|
entrypoint_nodes: Optional[RunFromNodeArg] = None,
|
263
283
|
external_inputs: Optional[ExternalInputsArg] = None,
|
264
284
|
cancel_signal: Optional[ThreadingEvent] = None,
|
265
285
|
) -> WorkflowEventStream:
|
286
|
+
"""
|
287
|
+
Invoke a Workflow, yielding events as they are emitted.
|
288
|
+
|
289
|
+
Parameters
|
290
|
+
----------
|
291
|
+
event_filter: Optional[Callable[[Type["BaseWorkflow"], WorkflowEvent], bool]] = None
|
292
|
+
A filter that can be used to filter events based on the Workflow Class and the event itself. If the method
|
293
|
+
returns `False`, the event will not be yielded.
|
294
|
+
|
295
|
+
inputs: Optional[WorkflowInputsType] = None
|
296
|
+
The Inputs instance used to initiate the Workflow Execution.
|
297
|
+
|
298
|
+
state: Optional[StateType] = None
|
299
|
+
The State instance to run the Workflow with. Workflows maintain a global state that can be used to
|
300
|
+
deterministically resume execution from any point.
|
301
|
+
|
302
|
+
entrypoint_nodes: Optional[RunFromNodeArg] = None
|
303
|
+
The entrypoint nodes to run the Workflow with. Useful for resuming execution from a specific node.
|
304
|
+
|
305
|
+
external_inputs: Optional[ExternalInputsArg] = None
|
306
|
+
External inputs to pass to the Workflow. Useful for providing human-in-the-loop behavior to the Workflow.
|
307
|
+
|
308
|
+
cancel_signal: Optional[ThreadingEvent] = None
|
309
|
+
A threading event that can be used to cancel the Workflow Execution.
|
310
|
+
"""
|
311
|
+
|
266
312
|
should_yield = event_filter or workflow_event_filter
|
267
313
|
for event in WorkflowRunner(
|
268
314
|
self,
|
@@ -299,14 +345,10 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
299
345
|
state_type = BaseState
|
300
346
|
|
301
347
|
if not issubclass(inputs_type, BaseInputs):
|
302
|
-
raise ValueError(
|
303
|
-
f"Expected first type to be a subclass of BaseInputs, was: {inputs_type}"
|
304
|
-
)
|
348
|
+
raise ValueError(f"Expected first type to be a subclass of BaseInputs, was: {inputs_type}")
|
305
349
|
|
306
350
|
if not issubclass(state_type, BaseState):
|
307
|
-
raise ValueError(
|
308
|
-
f"Expected second type to be a subclass of BaseState, was: {state_type}"
|
309
|
-
)
|
351
|
+
raise ValueError(f"Expected second type to be a subclass of BaseState, was: {state_type}")
|
310
352
|
|
311
353
|
return (inputs_type, state_type)
|
312
354
|
|
@@ -321,9 +363,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
321
363
|
def get_default_inputs(self) -> WorkflowInputsType:
|
322
364
|
return self.get_inputs_class()()
|
323
365
|
|
324
|
-
def get_default_state(
|
325
|
-
self, workflow_inputs: Optional[WorkflowInputsType] = None
|
326
|
-
) -> StateType:
|
366
|
+
def get_default_state(self, workflow_inputs: Optional[WorkflowInputsType] = None) -> StateType:
|
327
367
|
return self.get_state_class()(
|
328
368
|
meta=StateMeta(
|
329
369
|
parent=self._parent_state,
|
@@ -334,10 +374,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
334
374
|
def get_state_at_node(self, node: Type[BaseNode]) -> StateType:
|
335
375
|
event_ts = datetime.min
|
336
376
|
for event in self._store.events:
|
337
|
-
if
|
338
|
-
event.name == "node.execution.initiated"
|
339
|
-
and event.node_definition == node
|
340
|
-
):
|
377
|
+
if event.name == "node.execution.initiated" and event.node_definition == node:
|
341
378
|
event_ts = event.timestamp
|
342
379
|
|
343
380
|
most_recent_state_snapshot: Optional[StateType] = None
|
@@ -359,9 +396,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
359
396
|
next_state = cast(StateType, snapshot)
|
360
397
|
if not most_recent_state_snapshot:
|
361
398
|
most_recent_state_snapshot = next_state
|
362
|
-
elif
|
363
|
-
next_state.meta.updated_ts >= most_recent_state_snapshot.meta.updated_ts
|
364
|
-
):
|
399
|
+
elif next_state.meta.updated_ts >= most_recent_state_snapshot.meta.updated_ts:
|
365
400
|
most_recent_state_snapshot = next_state
|
366
401
|
|
367
402
|
if not most_recent_state_snapshot:
|
@@ -376,6 +411,9 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
|
|
376
411
|
|
377
412
|
workflows: List[Type[BaseWorkflow]] = []
|
378
413
|
for name in dir(module):
|
414
|
+
if name.startswith("__"):
|
415
|
+
continue
|
416
|
+
|
379
417
|
attr = getattr(module, name)
|
380
418
|
if (
|
381
419
|
inspect.isclass(attr)
|
@@ -399,6 +437,7 @@ WorkflowExecutionRejectedBody.model_rebuild()
|
|
399
437
|
WorkflowExecutionPausedBody.model_rebuild()
|
400
438
|
WorkflowExecutionResumedBody.model_rebuild()
|
401
439
|
WorkflowExecutionStreamingBody.model_rebuild()
|
440
|
+
WorkflowExecutionSnapshottedBody.model_rebuild()
|
402
441
|
|
403
442
|
NodeExecutionInitiatedBody.model_rebuild()
|
404
443
|
NodeExecutionFulfilledBody.model_rebuild()
|
@@ -413,6 +452,7 @@ WorkflowExecutionRejectedEvent.model_rebuild()
|
|
413
452
|
WorkflowExecutionPausedEvent.model_rebuild()
|
414
453
|
WorkflowExecutionResumedEvent.model_rebuild()
|
415
454
|
WorkflowExecutionStreamingEvent.model_rebuild()
|
455
|
+
WorkflowExecutionSnapshottedEvent.model_rebuild()
|
416
456
|
|
417
457
|
NodeExecutionInitiatedEvent.model_rebuild()
|
418
458
|
NodeExecutionFulfilledEvent.model_rebuild()
|
@@ -7,9 +7,7 @@ if TYPE_CHECKING:
|
|
7
7
|
from vellum.workflows.workflows.base import BaseWorkflow
|
8
8
|
|
9
9
|
|
10
|
-
def workflow_event_filter(
|
11
|
-
workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent"
|
12
|
-
) -> bool:
|
10
|
+
def workflow_event_filter(workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent") -> bool:
|
13
11
|
"""
|
14
12
|
Filters for only Workflow events that were emitted by the `workflow_definition` parameter.
|
15
13
|
"""
|
@@ -27,9 +25,7 @@ def workflow_event_filter(
|
|
27
25
|
return False
|
28
26
|
|
29
27
|
|
30
|
-
def root_workflow_event_filter(
|
31
|
-
workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent"
|
32
|
-
) -> bool:
|
28
|
+
def root_workflow_event_filter(workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent") -> bool:
|
33
29
|
"""
|
34
30
|
Filters for Workflow and Node events that were emitted by the `workflow_definition` parameter.
|
35
31
|
"""
|
@@ -50,12 +46,8 @@ def root_workflow_event_filter(
|
|
50
46
|
if event.parent.type != "WORKFLOW":
|
51
47
|
return False
|
52
48
|
|
53
|
-
return event.parent.workflow_definition == CodeResourceDefinition.encode(
|
54
|
-
workflow_definition
|
55
|
-
)
|
49
|
+
return event.parent.workflow_definition == CodeResourceDefinition.encode(workflow_definition)
|
56
50
|
|
57
51
|
|
58
|
-
def all_workflow_event_filter(
|
59
|
-
workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent"
|
60
|
-
) -> bool:
|
52
|
+
def all_workflow_event_filter(workflow_definition: Type["BaseWorkflow"], event: "WorkflowEvent") -> bool:
|
61
53
|
return True
|