vellum-ai 0.13.28__py3-none-any.whl → 0.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vellum/client/core/client_wrapper.py +1 -1
- vellum/workflows/constants.py +8 -3
- vellum/workflows/descriptors/tests/test_utils.py +21 -0
- vellum/workflows/descriptors/utils.py +3 -3
- vellum/workflows/errors/types.py +4 -1
- vellum/workflows/expressions/coalesce_expression.py +2 -2
- vellum/workflows/expressions/contains.py +4 -3
- vellum/workflows/expressions/does_not_contain.py +2 -1
- vellum/workflows/expressions/is_nil.py +2 -2
- vellum/workflows/expressions/is_not_nil.py +2 -2
- vellum/workflows/expressions/is_not_undefined.py +2 -2
- vellum/workflows/expressions/is_undefined.py +2 -2
- vellum/workflows/nodes/bases/base.py +19 -3
- vellum/workflows/nodes/bases/tests/test_base_node.py +84 -0
- vellum/workflows/nodes/core/inline_subworkflow_node/node.py +3 -3
- vellum/workflows/nodes/core/map_node/node.py +5 -0
- vellum/workflows/nodes/core/map_node/tests/test_node.py +22 -0
- vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py +68 -2
- vellum/workflows/nodes/displayable/code_execution_node/utils.py +30 -7
- vellum/workflows/outputs/base.py +21 -19
- vellum/workflows/references/external_input.py +2 -2
- vellum/workflows/references/lazy.py +2 -2
- vellum/workflows/references/output.py +7 -7
- vellum/workflows/runner/runner.py +20 -15
- vellum/workflows/state/base.py +2 -2
- vellum/workflows/state/tests/test_state.py +7 -11
- vellum/workflows/workflows/base.py +20 -0
- vellum/workflows/workflows/tests/__init__.py +0 -0
- vellum/workflows/workflows/tests/test_base_workflow.py +80 -0
- {vellum_ai-0.13.28.dist-info → vellum_ai-0.14.0.dist-info}/METADATA +1 -1
- {vellum_ai-0.13.28.dist-info → vellum_ai-0.14.0.dist-info}/RECORD +35 -33
- vellum_ee/workflows/display/nodes/base_node_display.py +2 -2
- {vellum_ai-0.13.28.dist-info → vellum_ai-0.14.0.dist-info}/LICENSE +0 -0
- {vellum_ai-0.13.28.dist-info → vellum_ai-0.14.0.dist-info}/WHEEL +0 -0
- {vellum_ai-0.13.28.dist-info → vellum_ai-0.14.0.dist-info}/entry_points.txt +0 -0
vellum/workflows/outputs/base.py
CHANGED
@@ -4,7 +4,7 @@ from typing_extensions import dataclass_transform
 from pydantic import GetCoreSchemaHandler
 from pydantic_core import core_schema
 
-from vellum.workflows.constants import
+from vellum.workflows.constants import undefined
 from vellum.workflows.descriptors.base import BaseDescriptor
 from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
@@ -19,17 +19,17 @@ _Accumulated = TypeVar("_Accumulated")
 
 
 class BaseOutput(Generic[_Delta, _Accumulated]):
-    _value: Union[_Accumulated, Type[
-    _delta: Union[_Delta, Type[
+    _value: Union[_Accumulated, Type[undefined]]
+    _delta: Union[_Delta, Type[undefined]]
     _name: str
 
     def __init__(
         self,
         name: str,
-        value: Union[_Accumulated, Type[
-        delta: Union[_Delta, Type[
+        value: Union[_Accumulated, Type[undefined]] = undefined,
+        delta: Union[_Delta, Type[undefined]] = undefined,
     ) -> None:
-        if value is not
+        if value is not undefined and delta is not undefined:
             raise ValueError("Cannot set both value and delta")
 
         self._name = name
@@ -37,24 +37,24 @@ class BaseOutput(Generic[_Delta, _Accumulated]):
         self._delta = delta
 
     @property
-    def delta(self) -> Union[_Delta, Type[
+    def delta(self) -> Union[_Delta, Type[undefined]]:
         return self._delta
 
     @property
-    def value(self) -> Union[_Accumulated, Type[
+    def value(self) -> Union[_Accumulated, Type[undefined]]:
         return self._value
 
     @property
     def is_initiated(self) -> bool:
-        return self._delta is
+        return self._delta is undefined and self._value is undefined
 
     @property
     def is_streaming(self) -> bool:
-        return self._delta is not
+        return self._delta is not undefined and self._value is undefined
 
     @property
     def is_fulfilled(self) -> bool:
-        return self._delta is
+        return self._delta is undefined and self._value is not undefined
 
     @property
     def name(self) -> str:
@@ -71,18 +71,18 @@ class BaseOutput(Generic[_Delta, _Accumulated]):
             "name": self.name,
         }
 
-        if self.value is not
+        if self.value is not undefined:
             data["value"] = self.value
 
-        if self.delta is not
+        if self.delta is not undefined:
             data["delta"] = self.delta
 
         return data
 
     def __repr__(self) -> str:
-        if self.value is not
+        if self.value is not undefined:
             return f"{self.__class__.__name__}({self.name}={self.value})"
-        elif self.delta is not
+        elif self.delta is not undefined:
             return f"{self.__class__.__name__}({self.name}={self.delta})"
         else:
             return f"{self.__class__.__name__}(name='{self.name}')"
@@ -144,7 +144,7 @@ class _BaseOutputsMeta(type):
         # We first try to resolve the instance that this class attribute name is mapped to. If it's not found,
         # we iterate through its inheritance hierarchy to find the first base class that has this attribute
         # and use its mapping.
-        instance = vars(cls).get(name,
+        instance = vars(cls).get(name, undefined)
         if not instance:
             for base in cls.__mro__[1:]:
                 if hasattr(base, name):
@@ -204,7 +204,9 @@ class BaseOutputs(metaclass=_BaseOutputsMeta):
         if not isinstance(other, dict):
             return super().__eq__(other)
 
-        outputs = {
+        outputs = {
+            name: value for name, value in vars(self).items() if not name.startswith("_") and value is not undefined
+        }
         return outputs == other
 
     def __repr__(self) -> str:
@@ -213,9 +215,9 @@ class BaseOutputs(metaclass=_BaseOutputsMeta):
 
     def __iter__(self) -> Iterator[Tuple[OutputReference, Any]]:
         for output_descriptor in self.__class__:
-            output_value = getattr(self, output_descriptor.name,
+            output_value = getattr(self, output_descriptor.name, undefined)
             if isinstance(output_value, BaseDescriptor):
-                output_value =
+                output_value = undefined
 
             yield (output_descriptor, output_value)
 
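Taken together, the changes in this file swap the old sentinel for the importable `undefined` constant that `BaseOutput` uses to distinguish its three states. A minimal sketch of those states, assuming only the 0.14.0 API shown in the hunks above:

from vellum.workflows.constants import undefined
from vellum.workflows.outputs.base import BaseOutput

# Neither value nor delta has been set yet: the output is merely initiated.
initiated = BaseOutput(name="answer")
assert initiated.is_initiated

# A delta without a value marks the output as streaming.
streaming = BaseOutput(name="answer", delta="Hel")
assert streaming.is_streaming and streaming.value is undefined

# A value without a delta marks the output as fulfilled.
fulfilled = BaseOutput(name="answer", value="Hello")
assert fulfilled.is_fulfilled and fulfilled.delta is undefined

# Passing both raises ValueError("Cannot set both value and delta").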
vellum/workflows/references/external_input.py
CHANGED
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Any, Generic, Optional, Tuple, Type, TypeVar,
 from pydantic import GetCoreSchemaHandler
 from pydantic_core import core_schema
 
-from vellum.workflows.constants import
+from vellum.workflows.constants import undefined
 from vellum.workflows.descriptors.base import BaseDescriptor
 from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
@@ -34,7 +34,7 @@ class ExternalInputReference(BaseDescriptor[_InputType], Generic[_InputType]):
 
     def resolve(self, state: "BaseState") -> _InputType:
         external_input = state.meta.external_inputs.get(self)
-        if external_input is not
+        if external_input is not undefined:
             return cast(_InputType, external_input)
 
         if state.meta.parent:
vellum/workflows/references/lazy.py
CHANGED
@@ -2,7 +2,7 @@ import ast
 import inspect
 from typing import TYPE_CHECKING, Callable, Generic, TypeVar, Union, get_args
 
-from vellum.workflows.constants import
+from vellum.workflows.constants import undefined
 from vellum.workflows.descriptors.base import BaseDescriptor
 
 if TYPE_CHECKING:
@@ -37,7 +37,7 @@ class LazyReference(BaseDescriptor[_T], Generic[_T]):
 
             # Fix typing surrounding the return value of node outputs/output descriptors
             # https://app.shortcut.com/vellum/story/4783
-            return
+            return undefined  # type: ignore[return-value]
 
         return resolve_value(self._get(), state)
 
vellum/workflows/references/output.py
CHANGED
@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any, Generator, Generic, Optional, Tuple, Type
 from pydantic import GetCoreSchemaHandler
 from pydantic_core import core_schema
 
-from vellum.workflows.constants import
+from vellum.workflows.constants import undefined
 from vellum.workflows.descriptors.base import BaseDescriptor
 
 if TYPE_CHECKING:
@@ -32,13 +32,13 @@ class OutputReference(BaseDescriptor[_OutputType], Generic[_OutputType]):
         return self._outputs_class
 
     def resolve(self, state: "BaseState") -> _OutputType:
-        node_output = state.meta.node_outputs.get(self,
+        node_output = state.meta.node_outputs.get(self, undefined)
         if isinstance(node_output, Queue):
             # Fix typing surrounding the return value of node outputs
             # https://app.shortcut.com/vellum/story/4783
             return self._as_generator(node_output)  # type: ignore[return-value]
 
-        if node_output is not
+        if node_output is not undefined:
             return cast(_OutputType, node_output)
 
         if state.meta.parent:
@@ -46,13 +46,13 @@ class OutputReference(BaseDescriptor[_OutputType], Generic[_OutputType]):
 
         # Fix typing surrounding the return value of node outputs
         # https://app.shortcut.com/vellum/story/4783
-        return cast(Type[
+        return cast(Type[undefined], node_output)  # type: ignore[return-value]
 
-    def _as_generator(self, node_output: Queue) -> Generator[_OutputType, None, Type[
+    def _as_generator(self, node_output: Queue) -> Generator[_OutputType, None, Type[undefined]]:
         while True:
             item = node_output.get()
-            if item is
-                return
+            if item is undefined:
+                return undefined
             yield cast(_OutputType, item)
 
     def __eq__(self, other: object) -> bool:
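The streaming path above relies on the same sentinel: a fulfilled output is signalled by pushing `undefined` onto the per-output queue, and `_as_generator` drains the queue until it sees it. A small self-contained sketch of that drain pattern, using a plain `queue.Queue` in place of the runner's internal queues:

from queue import Queue

from vellum.workflows.constants import undefined


def drain(stream: Queue):
    # Mirrors OutputReference._as_generator: yield until the sentinel arrives.
    while True:
        item = stream.get()
        if item is undefined:
            return
        yield item


stream: Queue = Queue()
for chunk in ("Hel", "lo"):
    stream.put(chunk)
stream.put(undefined)  # what the runner enqueues once the output is fulfilled

assert list(drain(stream)) == ["Hel", "lo"]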
vellum/workflows/runner/runner.py
CHANGED
@@ -6,7 +6,7 @@ from threading import Event as ThreadingEvent, Thread
 from uuid import UUID
 from typing import TYPE_CHECKING, Any, Dict, Generic, Iterable, Iterator, Optional, Sequence, Set, Tuple, Type, Union
 
-from vellum.workflows.constants import
+from vellum.workflows.constants import undefined
 from vellum.workflows.context import execution_context, get_parent_context
 from vellum.workflows.descriptors.base import BaseDescriptor
 from vellum.workflows.edges.edge import Edge
@@ -267,7 +267,7 @@ class WorkflowRunner(Generic[StateType]):
                 )
             elif output.is_fulfilled:
                 if output.name in streaming_output_queues:
-                    streaming_output_queues[output.name].put(
+                    streaming_output_queues[output.name].put(undefined)
 
                 setattr(outputs, output.name, output.value)
                 self._workflow_event_inner_queue.put(
@@ -286,7 +286,7 @@ class WorkflowRunner(Generic[StateType]):
         node.state.meta.node_execution_cache.fulfill_node_execution(node.__class__, span_id)
 
         for descriptor, output_value in outputs:
-            if output_value is
+            if output_value is undefined:
                 if descriptor in node.state.meta.node_outputs:
                     del node.state.meta.node_outputs[descriptor]
                 continue
@@ -386,8 +386,8 @@ class WorkflowRunner(Generic[StateType]):
             if not isinstance(descriptor, ExternalInputReference):
                 continue
 
-            if state.meta.external_inputs.get(descriptor,
-                state.meta.external_inputs[descriptor] =
+            if state.meta.external_inputs.get(descriptor, undefined) is undefined:
+                state.meta.external_inputs[descriptor] = undefined
             return
 
         all_deps = self._dependencies[node_class]
@@ -577,7 +577,7 @@ class WorkflowRunner(Generic[StateType]):
         unresolved_external_inputs = {
             descriptor
             for descriptor, node_input_value in final_state.meta.external_inputs.items()
-            if node_input_value is
+            if node_input_value is undefined
         }
         if unresolved_external_inputs:
             self._workflow_event_outer_queue.put(
@@ -616,19 +616,21 @@ class WorkflowRunner(Generic[StateType]):
             for emitter in self.workflow.emitters:
                 emitter.emit_event(item)
 
-    def _run_cancel_thread(self) -> None:
+    def _run_cancel_thread(self, kill_switch: ThreadingEvent) -> None:
         if not self._cancel_signal:
             return
 
-
-
-
-
-
-
+        while not kill_switch.wait(timeout=0.1):
+            if self._cancel_signal.is_set():
+                self._workflow_event_outer_queue.put(
+                    self._reject_workflow_event(
+                        WorkflowError(
+                            code=WorkflowErrorCode.WORKFLOW_CANCELLED,
+                            message="Workflow run cancelled",
+                        )
+                    )
                 )
-
-        )
+                return
 
     def _is_terminal_event(self, event: WorkflowEvent) -> bool:
         if (
@@ -646,10 +648,12 @@ class WorkflowRunner(Generic[StateType]):
         )
         background_thread.start()
 
+        cancel_thread_kill_switch = ThreadingEvent()
         if self._cancel_signal:
             cancel_thread = Thread(
                 target=self._run_cancel_thread,
                 name=f"{self.workflow.__class__.__name__}.cancel_thread",
+                kwargs={"kill_switch": cancel_thread_kill_switch},
             )
             cancel_thread.start()
 
@@ -694,3 +698,4 @@ class WorkflowRunner(Generic[StateType]):
             )
 
         self._background_thread_queue.put(None)
+        cancel_thread_kill_switch.set()
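The cancel watcher no longer blocks indefinitely on the cancel signal; it polls in short intervals and exits once the runner sets a kill switch after draining its background queue. A stripped-down sketch of the same pattern outside the runner (the `watch` helper and timings here are illustrative, not part of the SDK):

import time
from threading import Event, Thread


def watch(cancel_signal: Event, kill_switch: Event) -> None:
    # Poll so the thread can exit promptly, whether the run is cancelled
    # or the runner finishes and flips the kill switch.
    while not kill_switch.wait(timeout=0.1):
        if cancel_signal.is_set():
            print("cancelled")  # the runner emits a rejected workflow event here
            return


cancel_signal, kill_switch = Event(), Event()
watcher = Thread(target=watch, kwargs={"cancel_signal": cancel_signal, "kill_switch": kill_switch})
watcher.start()

time.sleep(0.05)
kill_switch.set()  # analogous to cancel_thread_kill_switch.set() above
watcher.join()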
vellum/workflows/state/base.py
CHANGED
@@ -12,7 +12,7 @@ from pydantic import GetCoreSchemaHandler, field_serializer
 from pydantic_core import core_schema
 
 from vellum.core.pydantic_utilities import UniversalBaseModel
-from vellum.workflows.constants import
+from vellum.workflows.constants import undefined
 from vellum.workflows.edges.edge import Edge
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.references import ExternalInputReference, OutputReference, StateValueReference
@@ -347,7 +347,7 @@ class BaseState(metaclass=_BaseStateMeta):
             if key.startswith("_"):
                 continue
 
-            if getattr(latest_state, key,
+            if getattr(latest_state, key, undefined) == undefined:
                 setattr(latest_state, key, value)
 
         return cast(StateType, latest_state)
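The second hunk applies the sentinel to state merging: attributes missing from the latest state are backfilled from an earlier snapshot. A toy illustration of that `getattr(..., undefined)` check, using hypothetical plain objects rather than `BaseState`:

from vellum.workflows.constants import undefined


class Snapshot:  # hypothetical stand-in for a BaseState instance
    pass


earlier, latest = Snapshot(), Snapshot()
earlier.foo = "bar"

for key, value in vars(earlier).items():
    if key.startswith("_"):
        continue
    # Only copy attributes the latest snapshot has not defined yet.
    if getattr(latest, key, undefined) == undefined:
        setattr(latest, key, value)

assert latest.foo == "bar"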
vellum/workflows/state/tests/test_state.py
CHANGED
@@ -23,6 +23,9 @@ class MockState(BaseState):
 
 
 class MockNode(BaseNode):
+    class ExternalInputs(BaseNode.ExternalInputs):
+        message: str
+
     class Outputs(BaseOutputs):
         baz: str
 
@@ -70,11 +73,7 @@ def test_state_snapshot__external_input_edit():
     assert snapshot_count[id(state)] == 0
 
     # WHEN we add an external input to state
-
-        message: str
-
-    # WHEN we edit external inputs dictionary
-    state.meta.external_inputs[MockExternalInputs.message] = "hello"
+    state.meta.external_inputs[MockNode.ExternalInputs.message] = "hello"
 
     # THEN the snapshot is emitted
     assert snapshot_count[id(state)] == 1
@@ -137,19 +136,16 @@ def test_state_deepcopy__with_external_input_updates():
     state = MockState(foo="bar")
 
     # AND we add an external input to state
-
-        message: str
-
-    state.meta.external_inputs[MockExternalInputs.message] = "hello"
+    state.meta.external_inputs[MockNode.ExternalInputs.message] = "hello"
 
     # AND we deepcopy the state
     deepcopied_state = deepcopy(state)
 
     # AND we update the original state
-    state.meta.external_inputs[
+    state.meta.external_inputs[MockNode.ExternalInputs.message] = "world"
 
     # THEN the copied state is not updated
-    assert deepcopied_state.meta.external_inputs[
+    assert deepcopied_state.meta.external_inputs[MockNode.ExternalInputs.message] == "hello"
 
     # AND the original state has had the correct number of snapshots
     assert snapshot_count[id(state)] == 2
vellum/workflows/workflows/base.py
CHANGED
@@ -80,6 +80,26 @@ class _BaseWorkflowMeta(type):
         if "graph" not in dct:
             dct["graph"] = set()
 
+        if "Outputs" in dct:
+            outputs_class = dct["Outputs"]
+
+            if not any(issubclass(base, BaseOutputs) for base in outputs_class.__bases__):
+                parent_outputs_class = next(
+                    (base.Outputs for base in bases if hasattr(base, "Outputs")),
+                    BaseOutputs,  # Default to BaseOutputs only if no parent has Outputs
+                )
+
+                filtered_bases = tuple(base for base in outputs_class.__bases__ if base is not object)
+
+                new_dct = {key: value for key, value in outputs_class.__dict__.items() if not key.startswith("__")}
+                new_dct["__module__"] = dct["__module__"]
+
+                dct["Outputs"] = type(
+                    f"{name}.Outputs",
+                    (parent_outputs_class,) + filtered_bases,
+                    new_dct,
+                )
+
         cls = super().__new__(mcs, name, bases, dct)
         workflow_class = cast(Type["BaseWorkflow"], cls)
         workflow_class.__id__ = uuid4_from_hash(workflow_class.__qualname__)
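The practical effect of this hook is that a workflow can declare a bare `class Outputs:` and still end up with a `BaseOutputs` subclass that chains to the parent workflow's outputs. The rebuild itself is ordinary `type()` machinery; a standalone sketch with hypothetical `Base`/`Plain` classes (not the SDK's types) shows the idea:

class Base:
    pass


class Plain:  # what a user might declare with no explicit base class
    output = "foo"


# Rebuild Plain so it subclasses Base while keeping its own namespace,
# mirroring the dct["Outputs"] = type(...) call above.
namespace = {key: value for key, value in Plain.__dict__.items() if not key.startswith("__")}
namespace["__module__"] = __name__
bases = (Base,) + tuple(base for base in Plain.__bases__ if base is not object)

Rebuilt = type("Plain", bases, namespace)

assert issubclass(Rebuilt, Base)
assert object not in Rebuilt.__bases__
assert Rebuilt.output == "foo"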
vellum/workflows/workflows/tests/__init__.py
File without changes
vellum/workflows/workflows/tests/test_base_workflow.py
@@ -0,0 +1,80 @@
+from vellum.workflows.inputs.base import BaseInputs
+from vellum.workflows.nodes.bases.base import BaseNode
+from vellum.workflows.nodes.core.inline_subworkflow_node.node import InlineSubworkflowNode
+from vellum.workflows.outputs.base import BaseOutputs
+from vellum.workflows.state.base import BaseState
+from vellum.workflows.workflows.base import BaseWorkflow
+
+
+def test_base_workflow__inherit_base_outputs():
+    class MyNode(BaseNode):
+        class Outputs(BaseNode.Outputs):
+            foo: str
+
+        def run(self):
+            return self.Outputs(foo="bar")
+
+    class MyWorkflow(BaseWorkflow[BaseInputs, BaseState]):
+        graph = MyNode
+
+        class Outputs:
+            output = MyNode.Outputs.foo
+
+    # TEST that the Outputs class is a subclass of BaseOutputs
+    assert issubclass(MyWorkflow.Outputs, BaseOutputs)
+
+    # TEST that the Outputs class does not inherit from object
+    assert object not in MyWorkflow.Outputs.__bases__
+
+    workflow = MyWorkflow()
+    terminal_event = workflow.run()
+
+    # TEST that the Outputs class has the correct attributes
+    assert hasattr(MyWorkflow.Outputs, "output")
+
+    # TEST that the outputs should be correct
+    assert terminal_event.name == "workflow.execution.fulfilled", terminal_event
+    assert terminal_event.outputs == {"output": "bar"}
+
+
+def test_subworkflow__inherit_base_outputs():
+    class StartNode(BaseNode):
+        class Outputs(BaseNode.Outputs):
+            foo: str
+
+        def run(self):
+            return self.Outputs(foo="bar")
+
+    class SubWorkflow(BaseWorkflow[BaseInputs, BaseState]):
+        graph = StartNode
+
+        class Outputs:
+            output = StartNode.Outputs.foo
+
+    class SubworkflowNode(InlineSubworkflowNode):
+        subworkflow = SubWorkflow
+
+    class MainWorkflow(BaseWorkflow[BaseInputs, BaseState]):
+        graph = SubworkflowNode
+
+        class Outputs:
+            output = SubworkflowNode.Outputs.output
+
+    # TEST that the Outputs classes are subclasses of BaseOutputs
+    assert issubclass(MainWorkflow.Outputs, BaseOutputs)
+    assert issubclass(SubWorkflow.Outputs, BaseOutputs)
+
+    # TEST that the Outputs classes do not inherit from object
+    assert object not in MainWorkflow.Outputs.__bases__
+    assert object not in SubWorkflow.Outputs.__bases__
+
+    # TEST execution
+    workflow = MainWorkflow()
+    terminal_event = workflow.run()
+
+    # TEST that the Outputs class has the correct attributes
+    assert hasattr(MainWorkflow.Outputs, "output")
+
+    # TEST that the outputs are correct
+    assert terminal_event.name == "workflow.execution.fulfilled", terminal_event
+    assert terminal_event.outputs == {"output": "bar"}