vellum-ai 1.8.5__py3-none-any.whl → 1.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- vellum/__init__.py +6 -0
- vellum/client/core/client_wrapper.py +2 -2
- vellum/client/types/__init__.py +6 -0
- vellum/client/types/api_actor_type_enum.py +7 -0
- vellum/client/types/api_request_parent_context.py +6 -0
- vellum/client/types/external_parent_context.py +2 -0
- vellum/client/types/integration_name.py +1 -0
- vellum/client/types/integration_trigger_context.py +38 -0
- vellum/client/types/node_execution_fulfilled_event.py +2 -0
- vellum/client/types/node_execution_initiated_event.py +2 -0
- vellum/client/types/node_execution_paused_event.py +2 -0
- vellum/client/types/node_execution_rejected_event.py +2 -0
- vellum/client/types/node_execution_resumed_event.py +2 -0
- vellum/client/types/node_execution_span.py +2 -0
- vellum/client/types/node_execution_streaming_event.py +2 -0
- vellum/client/types/node_parent_context.py +2 -0
- vellum/client/types/parent_context.py +4 -0
- vellum/client/types/prompt_deployment_parent_context.py +2 -0
- vellum/client/types/scheduled_trigger_context.py +38 -0
- vellum/client/types/slim_workflow_execution_read.py +2 -0
- vellum/client/types/span_link.py +2 -0
- vellum/client/types/workflow_deployment_event_executions_response.py +2 -0
- vellum/client/types/workflow_deployment_parent_context.py +2 -0
- vellum/client/types/workflow_event_execution_read.py +2 -0
- vellum/client/types/workflow_execution_detail.py +2 -0
- vellum/client/types/workflow_execution_fulfilled_event.py +2 -0
- vellum/client/types/workflow_execution_initiated_event.py +2 -0
- vellum/client/types/workflow_execution_paused_event.py +2 -0
- vellum/client/types/workflow_execution_rejected_event.py +2 -0
- vellum/client/types/workflow_execution_resumed_event.py +2 -0
- vellum/client/types/workflow_execution_snapshotted_event.py +2 -0
- vellum/client/types/workflow_execution_span.py +2 -0
- vellum/client/types/workflow_execution_streaming_event.py +2 -0
- vellum/client/types/workflow_parent_context.py +2 -0
- vellum/client/types/workflow_sandbox_parent_context.py +2 -0
- vellum/types/api_actor_type_enum.py +3 -0
- vellum/types/integration_trigger_context.py +3 -0
- vellum/types/scheduled_trigger_context.py +3 -0
- vellum/workflows/inputs/dataset_row.py +9 -7
- vellum/workflows/nodes/displayable/final_output_node/node.py +4 -0
- vellum/workflows/nodes/displayable/final_output_node/tests/test_node.py +28 -0
- vellum/workflows/nodes/displayable/set_state_node/__init__.py +5 -0
- vellum/workflows/nodes/displayable/set_state_node/node.py +71 -0
- vellum/workflows/nodes/displayable/set_state_node/tests/__init__.py +0 -0
- vellum/workflows/nodes/displayable/set_state_node/tests/test_node.py +212 -0
- vellum/workflows/sandbox.py +13 -3
- vellum/workflows/tests/test_dataset_row.py +20 -0
- vellum/workflows/tests/test_sandbox.py +40 -0
- vellum/workflows/tests/triggers/{test_vellum_integration_trigger.py → test_integration_trigger.py} +22 -22
- vellum/workflows/triggers/__init__.py +2 -2
- vellum/workflows/triggers/base.py +22 -4
- vellum/workflows/triggers/integration.py +168 -49
- vellum/workflows/triggers/schedule.py +18 -0
- vellum/workflows/triggers/tests/test_integration.py +49 -20
- vellum/workflows/utils/uuids.py +1 -15
- vellum/workflows/workflows/base.py +44 -0
- {vellum_ai-1.8.5.dist-info → vellum_ai-1.9.0.dist-info}/METADATA +1 -1
- {vellum_ai-1.8.5.dist-info → vellum_ai-1.9.0.dist-info}/RECORD +67 -57
- vellum_ee/workflows/display/tests/workflow_serialization/{test_vellum_integration_trigger_serialization.py → test_integration_trigger_serialization.py} +8 -8
- vellum_ee/workflows/display/utils/expressions.py +2 -3
- vellum_ee/workflows/display/workflows/base_workflow_display.py +9 -9
- vellum_ee/workflows/server/virtual_file_loader.py +74 -2
- vellum_ee/workflows/tests/test_server.py +81 -66
- vellum_ee/workflows/tests/test_virtual_files.py +48 -0
- vellum/workflows/triggers/vellum_integration.py +0 -189
- {vellum_ai-1.8.5.dist-info → vellum_ai-1.9.0.dist-info}/LICENSE +0 -0
- {vellum_ai-1.8.5.dist-info → vellum_ai-1.9.0.dist-info}/WHEEL +0 -0
- {vellum_ai-1.8.5.dist-info → vellum_ai-1.9.0.dist-info}/entry_points.txt +0 -0
vellum/workflows/nodes/displayable/final_output_node/tests/test_node.py
CHANGED
@@ -2,10 +2,12 @@ import pytest
 from typing import Any, Dict

 from vellum.workflows.exceptions import NodeException
+from vellum.workflows.nodes.core.templating_node import TemplatingNode
 from vellum.workflows.nodes.displayable.final_output_node import FinalOutputNode
 from vellum.workflows.nodes.displayable.inline_prompt_node import InlinePromptNode
 from vellum.workflows.references.output import OutputReference
 from vellum.workflows.state.base import BaseState
+from vellum.workflows.types.core import Json


 def test_final_output_node__mismatched_output_type_should_raise_exception_when_ran():
@@ -85,3 +87,29 @@ def test_final_output_node__dict_and_Dict_should_be_compatible():
         DictOutputNode.__validate__()
     except ValueError as e:
         pytest.fail(f"Validation should not raise an exception for dict/Dict compatibility: {e}")
+
+
+def test_final_output_node__any_output_type_should_accept_json():
+    """
+    Tests that FinalOutputNode with Any output type accepts a TemplatingNode with Json output type.
+    """
+
+    # GIVEN a TemplatingNode with Json output type
+    class JsonTemplatingNode(TemplatingNode[BaseState, Json]):
+        """Templating node that outputs Json."""
+
+        template = '{"key": "value"}'
+
+    # AND a FinalOutputNode with Any output type referencing the TemplatingNode
+    class AnyOutputNode(FinalOutputNode[BaseState, Any]):
+        """Output with Any type."""
+
+        class Outputs(FinalOutputNode.Outputs):
+            value = JsonTemplatingNode.Outputs.result
+
+    # WHEN attempting to validate the node class
+    # THEN validation should pass without raising an exception
+    try:
+        AnyOutputNode.__validate__()
+    except ValueError as e:
+        pytest.fail(f"Validation should not raise an exception when Any accepts Json: {e}")
vellum/workflows/nodes/displayable/set_state_node/node.py
ADDED
@@ -0,0 +1,71 @@
+from typing import Any, ClassVar, Dict, Generic
+
+from vellum.workflows.descriptors.base import BaseDescriptor
+from vellum.workflows.descriptors.utils import resolve_value
+from vellum.workflows.errors import WorkflowErrorCode
+from vellum.workflows.exceptions import NodeException
+from vellum.workflows.nodes.bases.base import BaseNode
+from vellum.workflows.outputs import BaseOutputs
+from vellum.workflows.types.generics import StateType
+
+
+class SetStateNode(BaseNode[StateType], Generic[StateType]):
+    """
+    A node that sets multiple state values at once.
+
+    This node accepts a dictionary mapping state paths to values and sets them all.
+    Values can be actual values or descriptors.
+
+    Example:
+        operations = {
+            "chat_history": State.chat_history.concat(LazyReference("MyAgentNode.Outputs.chat_history")),
+            "counter": State.counter + 1,
+            "total_tokens": State.user_tokens.add(State.assistant_tokens)
+        }
+
+    Args:
+        operations: Dictionary mapping state attribute names to values (descriptors or actual values)
+    """
+
+    # Dictionary mapping state paths to values
+    operations: ClassVar[Dict[str, Any]] = {}
+
+    class Outputs(BaseOutputs):
+        """
+        The outputs of the SetStateNode.
+
+        result: Dict[str, Any] - Dictionary of all state updates
+        """
+
+        result: Dict[str, Any]
+
+    def run(self) -> Outputs:
+        """
+        Run the node and set all the state values.
+        Resolves descriptors to their actual values before setting state.
+        """
+        # First pass: validate and resolve all operations without mutating state
+        resolved_updates: Dict[str, Any] = {}
+        for path, value in self.operations.items():
+            # Validate the state attribute exists prior to any mutation
+            if not hasattr(self.state, path):
+                raise NodeException(
+                    f"State does not have attribute '{path}'. "
+                    f"Only existing state attributes can be set via SetStateNode.",
+                    code=WorkflowErrorCode.INVALID_STATE,
+                )
+
+            # Resolve the value if it's a descriptor against the current (unmodified) state
+            if isinstance(value, BaseDescriptor):
+                resolved_value = resolve_value(value, self.state)
+            else:
+                resolved_value = value
+
+            resolved_updates[path] = resolved_value
+
+        # Second pass: apply the resolved updates to the state atomically
+        with self.state.__atomic__():
+            for path, resolved_value in resolved_updates.items():
+                setattr(self.state, path, resolved_value)
+
+        return self.Outputs(result=resolved_updates)
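As a quick orientation, here is a minimal usage sketch of the new SetStateNode, modeled on the tests that follow; TestState and IncrementAndGreet are illustrative names, not part of the package:

    from vellum.workflows.nodes.displayable.set_state_node import SetStateNode
    from vellum.workflows.state.base import BaseState

    class TestState(BaseState):
        counter: int = 0
        message: str = ""

    class IncrementAndGreet(SetStateNode[TestState]):
        # operations maps existing state attribute names to plain values or descriptor expressions
        operations = {
            "message": "Hello, World!",
            "counter": TestState.counter + 1,
        }

    state = TestState()
    outputs = IncrementAndGreet(state=state).run()
    assert state.message == "Hello, World!" and state.counter == 1
    assert outputs.result == {"message": "Hello, World!", "counter": 1}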
vellum/workflows/nodes/displayable/set_state_node/tests/__init__.py
File without changes
vellum/workflows/nodes/displayable/set_state_node/tests/test_node.py
ADDED
@@ -0,0 +1,212 @@
+import pytest
+from typing import List
+
+from vellum import ChatMessage
+from vellum.workflows.errors import WorkflowErrorCode
+from vellum.workflows.exceptions import NodeException
+from vellum.workflows.nodes.displayable.set_state_node import SetStateNode
+from vellum.workflows.state.base import BaseState
+
+
+def test_set_state_node_with_single_value():
+    """Test that SetStateNode can set a single state value."""
+
+    # GIVEN a state with a string field
+    class TestState(BaseState):
+        message: str = ""
+
+    # AND a SetStateNode that sets a message
+    class SetMessage(SetStateNode[TestState]):
+        operations = {"message": "Hello, World!"}
+
+    # WHEN we run the node
+    state = TestState()
+    node = SetMessage(state=state)
+    outputs = node.run()
+
+    # THEN the result should be the set value
+    assert outputs.result == {"message": "Hello, World!"}
+
+    # AND the state should be updated
+    assert state.message == "Hello, World!"
+
+
+def test_set_state_node_with_multiple_values():
+    """Test that SetStateNode can set multiple state values at once with expressions."""
+
+    # GIVEN a state with multiple fields
+    class TestState(BaseState):
+        counter: int = 0
+        total_tokens: int = 0
+        user_tokens: int = 5
+        assistant_tokens: int = 10
+
+    # AND a SetStateNode that sets multiple values with expressions using add
+    class UpdateMultipleState(SetStateNode[TestState]):
+        operations = {
+            "counter": 1,
+            "total_tokens": TestState.user_tokens + TestState.assistant_tokens,
+        }
+
+    # WHEN we run the node
+    state = TestState()
+    node = UpdateMultipleState(state=state)
+    outputs = node.run()
+
+    # THEN all state values should be updated
+    assert state.counter == 1
+    assert state.total_tokens == 15
+
+    # AND the result should contain all updates
+    assert outputs.result == {"counter": 1, "total_tokens": 15}
+
+
+def test_set_state_node_with_chat_history_and_concat():
+    """Test that SetStateNode can concatenate chat history using concat."""
+
+    # GIVEN a state with chat history
+    class TestState(BaseState):
+        chat_history: List[ChatMessage] = []
+
+    # AND initial chat history in state
+    initial_messages = [ChatMessage(role="ASSISTANT", text="Hi there")]
+
+    # AND a SetStateNode that uses concat() method to add to chat history
+    class UpdateChatHistory(SetStateNode[TestState]):
+        operations = {"chat_history": TestState.chat_history.concat([ChatMessage(role="USER", text="Hello")])}
+
+    # WHEN we run the node with initial state
+    state = TestState(chat_history=initial_messages)
+
+    node = UpdateChatHistory(state=state)
+    outputs = node.run()
+
+    # THEN the state should have concatenated chat history
+    assert len(state.chat_history) == 2
+    assert state.chat_history[0].role == "ASSISTANT"
+    assert state.chat_history[1].role == "USER"
+    assert state.chat_history[1].text == "Hello"
+
+    # AND the result should contain the updated chat history
+    assert outputs.result == {
+        "chat_history": [ChatMessage(role="ASSISTANT", text="Hi there"), ChatMessage(role="USER", text="Hello")]
+    }
+
+
+def test_set_state_node_with_empty_dict():
+    """Test that SetStateNode handles empty operations gracefully."""
+
+    # GIVEN a state
+    class TestState(BaseState):
+        counter: int = 0
+
+    # AND a SetStateNode with empty operations
+    class EmptySetState(SetStateNode[TestState]):
+        operations = {}
+
+    # WHEN we run the node
+    state = TestState()
+    node = EmptySetState(state=state)
+    outputs = node.run()
+
+    # THEN the result should be an empty dict
+    assert outputs.result == {}
+
+    # AND state should be unchanged
+    assert state.counter == 0
+
+
+def test_set_state_node_modifies_existing_values():
+    """Test that SetStateNode can modify existing state values."""
+
+    # GIVEN a state with existing values
+    class TestState(BaseState):
+        counter: int = 10
+        message: str = "old"
+
+    # AND a SetStateNode that updates existing values
+    class UpdateState(SetStateNode[TestState]):
+        operations = {"counter": 20, "message": "new"}
+
+    # WHEN we run the node
+    state = TestState()
+    node = UpdateState(state=state)
+    outputs = node.run()
+
+    # THEN state should be updated
+    assert state.counter == 20
+    assert state.message == "new"
+
+    # AND result should contain the new values with the correct structure
+    assert outputs.result == {"counter": 20, "message": "new"}
+
+
+def test_set_state_not_existing_value():
+    """Test that SetStateNode raises an error when trying to set a value that doesn't exist in the state."""
+
+    # GIVEN a state
+    class TestState(BaseState):
+        counter: int = 0
+
+    # AND a SetStateNode that tries to set a value that doesn't exist in the state
+    class SetState(SetStateNode[TestState]):
+        operations = {"unknown_value": 10}
+
+    # WHEN we run the node
+    state = TestState()
+    node = SetState(state=state)
+
+    # THEN it should raise a NodeException
+    with pytest.raises(NodeException) as exc_info:
+        node.run()
+
+    # AND the error should be INVALID_STATE and the message should mention the non-existent attribute
+    assert exc_info.value.code == WorkflowErrorCode.INVALID_STATE
+    assert (
+        "State does not have attribute 'unknown_value'. Only existing state attributes can be set via SetStateNode."
+        == str(exc_info.value)
+    )
+
+
+def test_set_state_node_atomic_order_independent_resolution():
+    """Both operations resolve against the original state before any mutation."""
+
+    class TestState(BaseState):
+        a: int = 1
+        b: int = 0
+
+    class UpdateAB(SetStateNode[TestState]):
+        # Both should see original a=1, so both resolve to 2
+        operations = {
+            "a": TestState.a + 1,
+            "b": TestState.a + 1,
+        }
+
+    state = TestState()
+    node = UpdateAB(state=state)
+    outputs = node.run()
+
+    assert state.a == 2
+    assert state.b == 2
+    assert outputs.result == {"a": 2, "b": 2}
+
+
+def test_set_state_node_no_partial_update_on_error():
+    """If a later operation is invalid, no earlier changes should be applied."""
+
+    class TestState(BaseState):
+        a: int = 5
+
+    class PartialFail(SetStateNode[TestState]):
+        operations = {
+            "a": 42,  # would change a
+            "missing": 1,  # invalid attribute triggers NodeException
+        }
+
+    state = TestState()
+    node = PartialFail(state=state)
+
+    with pytest.raises(NodeException):
+        node.run()
+
+    assert state.a == 5
vellum/workflows/sandbox.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import Generic, Optional, Sequence, Union
+from typing import Any, Dict, Generic, Optional, Sequence, Union

 import dotenv

@@ -51,11 +51,21 @@ class WorkflowSandboxRunner(Generic[WorkflowType]):

         selected_inputs = self._inputs[index]

+        raw_inputs: Union[BaseInputs, Dict[str, Any]]
         if isinstance(selected_inputs, DatasetRow):
-
+            raw_inputs = selected_inputs.inputs
+        else:
+            raw_inputs = selected_inputs
+
+        inputs_for_stream: BaseInputs
+        if isinstance(raw_inputs, dict):
+            inputs_class = type(self._workflow).get_inputs_class()
+            inputs_for_stream = inputs_class(**raw_inputs)
+        else:
+            inputs_for_stream = raw_inputs

         events = self._workflow.stream(
-            inputs=
+            inputs=inputs_for_stream,
             event_filter=root_workflow_event_filter,
         )

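In effect, a DatasetRow whose inputs are given as a plain dict is now resolved through the workflow's inputs class before streaming. A minimal sketch of that behavior, assuming a workflow inputs class with a single message field (the names here are illustrative):

    from vellum.workflows.inputs.base import BaseInputs
    from vellum.workflows.inputs.dataset_row import DatasetRow

    class Inputs(BaseInputs):
        message: str

    row = DatasetRow(label="example", inputs={"message": "Hello from dict"})

    # Mirrors the runner's new logic: dict inputs are coerced into the workflow's
    # BaseInputs subclass, while BaseInputs instances pass through unchanged.
    inputs_for_stream = Inputs(**row.inputs) if isinstance(row.inputs, dict) else row.inputs
    assert inputs_for_stream.message == "Hello from dict"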
vellum/workflows/tests/test_dataset_row.py
CHANGED
@@ -126,3 +126,23 @@ def test_dataset_row_with_empty_inputs():

     assert serialized_dict["label"] == "test_label"
     assert serialized_dict["inputs"] == {}
+
+
+def test_dataset_row_with_dict_inputs():
+    """
+    Test that DatasetRow can accept a dict for the inputs parameter.
+    """
+
+    # GIVEN a dict with input data
+    inputs_dict = {"message": "Hello World", "count": 42}
+
+    dataset_row = DatasetRow(label="test_label", inputs=inputs_dict)
+
+    # THEN the inputs should remain as a dict
+    assert isinstance(dataset_row.inputs, dict)
+
+    # AND the serialized dict should contain the correct data
+    serialized_dict = dataset_row.model_dump()
+    assert serialized_dict["label"] == "test_label"
+    assert serialized_dict["inputs"]["message"] == "Hello World"
+    assert serialized_dict["inputs"]["count"] == 42
vellum/workflows/tests/test_sandbox.py
CHANGED
@@ -2,6 +2,7 @@ import pytest
 from typing import List

 from vellum.workflows.inputs.base import BaseInputs
+from vellum.workflows.inputs.dataset_row import DatasetRow
 from vellum.workflows.nodes.bases.base import BaseNode
 from vellum.workflows.sandbox import WorkflowSandboxRunner
 from vellum.workflows.state.base import BaseState
@@ -60,3 +61,42 @@ def test_sandbox_runner__happy_path(mock_logger, run_kwargs, expected_last_log):
         "----------------------------------",
         expected_last_log,
     ]
+
+
+def test_sandbox_runner_with_dict_inputs(mock_logger):
+    """
+    Test that WorkflowSandboxRunner can run with dict inputs in DatasetRow.
+    """
+
+    # GIVEN we capture the logs to stdout
+    logs = []
+    mock_logger.return_value.info.side_effect = lambda msg: logs.append(msg)
+
+    class Inputs(BaseInputs):
+        message: str
+
+    class StartNode(BaseNode):
+        class Outputs(BaseNode.Outputs):
+            result = Inputs.message
+
+    class Workflow(BaseWorkflow[Inputs, BaseState]):
+        graph = StartNode
+
+        class Outputs(BaseWorkflow.Outputs):
+            final_output = StartNode.Outputs.result
+
+    dataset = [
+        DatasetRow(label="test_row", inputs={"message": "Hello from dict"}),
+    ]
+
+    # WHEN we run the sandbox with the DatasetRow containing dict inputs
+    runner = WorkflowSandboxRunner(workflow=Workflow(), dataset=dataset)
+    runner.run()
+
+    assert logs == [
+        "Just started Node: StartNode",
+        "Just finished Node: StartNode",
+        "Workflow fulfilled!",
+        "----------------------------------",
+        "final_output: Hello from dict",
+    ]
vellum/workflows/tests/triggers/{test_vellum_integration_trigger.py → test_integration_trigger.py}
RENAMED
@@ -1,10 +1,10 @@
-"""Tests for
+"""Tests for IntegrationTrigger."""

 import pytest

 from vellum.workflows.constants import VellumIntegrationProviderType
 from vellum.workflows.references.trigger import TriggerAttributeReference
-from vellum.workflows.triggers.
+from vellum.workflows.triggers.integration import IntegrationTrigger


 def test_requires_config():
@@ -13,27 +13,27 @@ def test_requires_config():
     # This should fail - no Config class
     with pytest.raises(TypeError, match="Config"):

-        class BadTrigger1(
+        class BadTrigger1(IntegrationTrigger):
             message: str
             # Missing Config!

     # This should fail - incomplete Config
     with pytest.raises(TypeError, match="provider"):

-        class BadTrigger2(
+        class BadTrigger2(IntegrationTrigger):
             message: str

-            class Config(
+            class Config(IntegrationTrigger.Config):
                 integration_name = "SLACK"
                 slug = "slack_new_message"
                 # Missing provider!

     # This should work
-    class GoodTrigger(
+    class GoodTrigger(IntegrationTrigger):
         message: str
         user: str

-        class Config(
+        class Config(IntegrationTrigger.Config):
             provider = VellumIntegrationProviderType.COMPOSIO
             integration_name = "SLACK"
             slug = "slack_new_message"
@@ -44,12 +44,12 @@ def test_requires_config():
 def test_top_level_annotations_create_references():
     """Top-level type annotations (webhook event attributes) automatically create TriggerAttributeReference."""

-    class SlackTrigger(
+    class SlackTrigger(IntegrationTrigger):
         message: str
         user: str
         timestamp: float

-        class Config(
+        class Config(IntegrationTrigger.Config):
             provider = VellumIntegrationProviderType.COMPOSIO
             integration_name = "SLACK"
             slug = "slack_new_message"
@@ -69,18 +69,18 @@ def test_top_level_annotations_create_references():
 def test_attribute_ids_include_class_name():
     """Attribute IDs should include class name (like nodes)."""

-    class Trigger1(
+    class Trigger1(IntegrationTrigger):
         message: str

-        class Config(
+        class Config(IntegrationTrigger.Config):
             provider = VellumIntegrationProviderType.COMPOSIO
             integration_name = "SLACK"
             slug = "slack_new_message"

-    class Trigger2(
+    class Trigger2(IntegrationTrigger):
         message: str

-        class Config(
+        class Config(IntegrationTrigger.Config):
             provider = VellumIntegrationProviderType.COMPOSIO
             integration_name = "SLACK"
             slug = "slack_new_message"
@@ -93,12 +93,12 @@ def test_attribute_ids_include_class_name():
 def test_populates_dynamic_attributes():
     """Trigger dynamically populates attributes from event_data keys."""

-    class GithubPushTrigger(
+    class GithubPushTrigger(IntegrationTrigger):
         repository: str
         branch: str
         commits: list

-        class Config(
+        class Config(IntegrationTrigger.Config):
             provider = VellumIntegrationProviderType.COMPOSIO
             integration_name = "GITHUB"
             slug = "github_push_event"
@@ -109,7 +109,7 @@ def test_populates_dynamic_attributes():
         "commits": ["abc123", "def456"],
     }

-    trigger = GithubPushTrigger(event_data
+    trigger = GithubPushTrigger(**event_data)

     assert trigger.repository == "vellum-ai/workflows"
     assert trigger.branch == "main"
@@ -119,17 +119,17 @@ def test_populates_dynamic_attributes():
 def test_to_trigger_attribute_values():
     """to_trigger_attribute_values returns correct attribute mappings."""

-    class SlackTrigger(
+    class SlackTrigger(IntegrationTrigger):
         message: str
         channel: str

-        class Config(
+        class Config(IntegrationTrigger.Config):
             provider = VellumIntegrationProviderType.COMPOSIO
             integration_name = "SLACK"
             slug = "slack_new_message"

     event_data = {"message": "Hello", "channel": "C123"}
-    trigger = SlackTrigger(event_data
+    trigger = SlackTrigger(**event_data)

     attr_values = trigger.to_trigger_attribute_values()

@@ -142,15 +142,15 @@ def test_to_trigger_attribute_values():
 def test_empty_event_data():
     """Trigger handles empty event data gracefully."""

-    class SlackTrigger(
+    class SlackTrigger(IntegrationTrigger):
         message: str

-        class Config(
+        class Config(IntegrationTrigger.Config):
             provider = VellumIntegrationProviderType.COMPOSIO
             integration_name = "SLACK"
             slug = "slack_new_message"

-    trigger = SlackTrigger(
+    trigger = SlackTrigger()

     attr_values = trigger.to_trigger_attribute_values()
     assert attr_values == {}
vellum/workflows/triggers/__init__.py
CHANGED
@@ -1,6 +1,6 @@
 from vellum.workflows.triggers.base import BaseTrigger
 from vellum.workflows.triggers.integration import IntegrationTrigger
 from vellum.workflows.triggers.manual import ManualTrigger
-from vellum.workflows.triggers.
+from vellum.workflows.triggers.schedule import ScheduleTrigger

-__all__ = ["BaseTrigger", "IntegrationTrigger", "ManualTrigger", "
+__all__ = ["BaseTrigger", "IntegrationTrigger", "ManualTrigger", "ScheduleTrigger"]
vellum/workflows/triggers/base.py
CHANGED
@@ -1,14 +1,16 @@
 from abc import ABC, ABCMeta
 import inspect
+from uuid import UUID
 from typing import TYPE_CHECKING, Any, ClassVar, Dict, Iterator, Tuple, Type, cast, get_origin

+from vellum.workflows.references.trigger import TriggerAttributeReference
+from vellum.workflows.types.utils import get_class_attr_names, infer_types
+from vellum.workflows.utils.uuids import uuid4_from_hash
+
 if TYPE_CHECKING:
     from vellum.workflows.graph.graph import Graph, GraphTarget
     from vellum.workflows.state.base import BaseState

-from vellum.workflows.references.trigger import TriggerAttributeReference
-from vellum.workflows.types.utils import get_class_attr_names, infer_types
-

 def _is_annotated(cls: Type, name: str) -> bool:
     annotations = getattr(cls, "__annotations__", {})
@@ -28,7 +30,11 @@ def _is_annotated(cls: Type, name: str) -> bool:
 class BaseTriggerMeta(ABCMeta):
     def __new__(mcs, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
         cls = super().__new__(mcs, name, bases, dct)
-
+        trigger_class = cast(Type["BaseTrigger"], cls)
+
+        trigger_class.__id__ = uuid4_from_hash(f"{trigger_class.__module__}.{trigger_class.__qualname__}")
+
+        return trigger_class

     """
     Metaclass for BaseTrigger that enables class-level >> operator.
@@ -192,6 +198,18 @@ class BaseTrigger(ABC, metaclass=BaseTriggerMeta):
     Like nodes, triggers work at the class level only. Do not instantiate triggers.
     """

+    __id__: UUID
+
+    def __init__(self, **kwargs: Any):
+        """
+        Initialize trigger with event data.
+
+        Args:
+            **kwargs: Arbitrary keyword arguments passed during trigger instantiation.
+                Subclasses may use these to populate trigger attributes.
+        """
+        self._event_data = kwargs
+
     @classmethod
     def attribute_references(cls) -> Dict[str, "TriggerAttributeReference[Any]"]:
         """Return class-level trigger attribute descriptors keyed by attribute name."""