vellum-ai 0.13.27__py3-none-any.whl → 0.14.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (57)
  1. vellum/client/core/client_wrapper.py +1 -1
  2. vellum/workflows/constants.py +8 -3
  3. vellum/workflows/descriptors/exceptions.py +2 -0
  4. vellum/workflows/descriptors/tests/test_utils.py +21 -0
  5. vellum/workflows/descriptors/utils.py +3 -3
  6. vellum/workflows/errors/types.py +4 -1
  7. vellum/workflows/expressions/accessor.py +4 -3
  8. vellum/workflows/expressions/begins_with.py +3 -2
  9. vellum/workflows/expressions/between.py +4 -3
  10. vellum/workflows/expressions/coalesce_expression.py +2 -2
  11. vellum/workflows/expressions/contains.py +10 -2
  12. vellum/workflows/expressions/does_not_begin_with.py +3 -2
  13. vellum/workflows/expressions/does_not_contain.py +6 -2
  14. vellum/workflows/expressions/does_not_end_with.py +3 -2
  15. vellum/workflows/expressions/ends_with.py +3 -2
  16. vellum/workflows/expressions/greater_than.py +3 -2
  17. vellum/workflows/expressions/greater_than_or_equal_to.py +3 -2
  18. vellum/workflows/expressions/in_.py +2 -1
  19. vellum/workflows/expressions/is_blank.py +2 -1
  20. vellum/workflows/expressions/is_nil.py +2 -2
  21. vellum/workflows/expressions/is_not_blank.py +2 -1
  22. vellum/workflows/expressions/is_not_nil.py +2 -2
  23. vellum/workflows/expressions/is_not_undefined.py +2 -2
  24. vellum/workflows/expressions/is_undefined.py +2 -2
  25. vellum/workflows/expressions/less_than.py +3 -2
  26. vellum/workflows/expressions/less_than_or_equal_to.py +3 -2
  27. vellum/workflows/expressions/not_between.py +4 -3
  28. vellum/workflows/expressions/not_in.py +2 -1
  29. vellum/workflows/nodes/bases/base.py +21 -7
  30. vellum/workflows/nodes/bases/tests/test_base_node.py +84 -0
  31. vellum/workflows/nodes/core/inline_subworkflow_node/node.py +3 -3
  32. vellum/workflows/nodes/core/map_node/node.py +5 -0
  33. vellum/workflows/nodes/core/map_node/tests/test_node.py +22 -0
  34. vellum/workflows/nodes/displayable/bases/api_node/node.py +8 -3
  35. vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +19 -9
  36. vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py +68 -2
  37. vellum/workflows/nodes/displayable/code_execution_node/utils.py +30 -7
  38. vellum/workflows/outputs/base.py +21 -19
  39. vellum/workflows/ports/port.py +14 -5
  40. vellum/workflows/references/external_input.py +2 -2
  41. vellum/workflows/references/lazy.py +2 -2
  42. vellum/workflows/references/output.py +7 -7
  43. vellum/workflows/runner/runner.py +20 -15
  44. vellum/workflows/state/base.py +2 -2
  45. vellum/workflows/state/tests/test_state.py +7 -11
  46. vellum/workflows/utils/vellum_variables.py +3 -0
  47. vellum/workflows/workflows/base.py +20 -0
  48. vellum/workflows/workflows/tests/__init__.py +0 -0
  49. vellum/workflows/workflows/tests/test_base_workflow.py +80 -0
  50. {vellum_ai-0.13.27.dist-info → vellum_ai-0.14.0.dist-info}/METADATA +2 -1
  51. {vellum_ai-0.13.27.dist-info → vellum_ai-0.14.0.dist-info}/RECORD +57 -54
  52. vellum_cli/push.py +15 -1
  53. vellum_cli/tests/test_push.py +44 -0
  54. vellum_ee/workflows/display/nodes/base_node_display.py +2 -2
  55. {vellum_ai-0.13.27.dist-info → vellum_ai-0.14.0.dist-info}/LICENSE +0 -0
  56. {vellum_ai-0.13.27.dist-info → vellum_ai-0.14.0.dist-info}/WHEEL +0 -0
  57. {vellum_ai-0.13.27.dist-info → vellum_ai-0.14.0.dist-info}/entry_points.txt +0 -0
vellum/workflows/nodes/bases/tests/test_base_node.py
@@ -5,6 +5,7 @@ from vellum.client.types.string_vellum_value_request import StringVellumValueReq
  from vellum.core.pydantic_utilities import UniversalBaseModel
  from vellum.workflows.inputs.base import BaseInputs
  from vellum.workflows.nodes.bases.base import BaseNode
+ from vellum.workflows.outputs.base import BaseOutputs
  from vellum.workflows.state.base import BaseState, StateMeta


@@ -148,3 +149,86 @@ def test_base_node__node_resolution__descriptor_in_fern_pydantic():
      node = SomeNode(state=State(foo="bar"))

      assert node.model.value == "bar"
+
+
+ def test_base_node__inherit_base_outputs():
+     class MyNode(BaseNode):
+         class Outputs:
+             foo: str
+
+         def run(self):
+             return self.Outputs(foo="bar")  # type: ignore
+
+     # TEST that the Outputs class is a subclass of BaseOutputs
+     assert issubclass(MyNode.Outputs, BaseOutputs)
+
+     # TEST that the Outputs class does not inherit from object
+     assert object not in MyNode.Outputs.__bases__
+
+     # TEST that the Outputs class has the correct attributes
+     assert hasattr(MyNode.Outputs, "foo")
+
+     # WHEN the node is run
+     node = MyNode()
+     outputs = node.run()
+
+     # THEN the outputs should be correct
+     assert outputs.foo == "bar"
+
+
+ def test_child_node__inherits_base_outputs_when_no_parent_outputs():
+     class ParentNode(BaseNode):  # No Outputs class here
+         pass
+
+     class ChildNode(ParentNode):
+         class Outputs:
+             foo: str
+
+         def run(self):
+             return self.Outputs(foo="bar")  # type: ignore
+
+     # TEST that ChildNode.Outputs is a subclass of BaseOutputs (since ParentNode has no Outputs)
+     assert issubclass(ChildNode.Outputs, BaseOutputs)
+
+     # TEST that ChildNode.Outputs has the correct attributes
+     assert hasattr(ChildNode.Outputs, "foo")
+
+     # WHEN the node is run
+     node = ChildNode()
+     outputs = node.run()
+
+     # THEN the outputs should be correct
+     assert outputs.foo == "bar"
+
+
+ def test_outputs_preserves_non_object_bases():
+     class ParentNode(BaseNode):
+         class Outputs:
+             foo: str
+
+     class Foo:
+         bar: str
+
+     class ChildNode(ParentNode):
+         class Outputs(ParentNode.Outputs, Foo):
+             pass
+
+         def run(self):
+             return self.Outputs(foo="bar", bar="baz")  # type: ignore
+
+     # TEST that Outputs is a subclass of Foo and ParentNode.Outputs
+     assert Foo in ChildNode.Outputs.__bases__, "Foo should be preserved in bases"
+     assert ParentNode.Outputs in ChildNode.Outputs.__bases__, "ParentNode.Outputs should be preserved in bases"
+     assert object not in ChildNode.Outputs.__bases__, "object should not be in bases"
+
+     # TEST that Outputs has the correct attributes
+     assert hasattr(ChildNode.Outputs, "foo")
+     assert hasattr(ChildNode.Outputs, "bar")
+
+     # WHEN Outputs is instantiated
+     node = ChildNode()
+     outputs = node.run()
+
+     # THEN the output values should be correct
+     assert outputs.foo == "bar"
+     assert outputs.bar == "baz"
vellum/workflows/nodes/core/inline_subworkflow_node/node.py
@@ -1,6 +1,6 @@
  from typing import TYPE_CHECKING, Any, ClassVar, Dict, Generic, Iterator, Optional, Set, Tuple, Type, TypeVar, Union

- from vellum.workflows.constants import UNDEF
+ from vellum.workflows.constants import undefined
  from vellum.workflows.context import execution_context, get_parent_context
  from vellum.workflows.errors.types import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
@@ -67,7 +67,7 @@ class InlineSubworkflowNode(
      """

      subworkflow: Type["BaseWorkflow[InputsType, InnerStateType]"]
-     subworkflow_inputs: ClassVar[Union[EntityInputsInterface, BaseInputs, Type[UNDEF]]] = UNDEF
+     subworkflow_inputs: ClassVar[Union[EntityInputsInterface, BaseInputs, Type[undefined]]] = undefined

      def run(self) -> Iterator[BaseOutput]:
          with execution_context(parent_context=get_parent_context() or self._context.parent_context):
@@ -112,7 +112,7 @@ class InlineSubworkflowNode(

      def _compile_subworkflow_inputs(self) -> InputsType:
          inputs_class = self.subworkflow.get_inputs_class()
-         if self.subworkflow_inputs is UNDEF:
+         if self.subworkflow_inputs is undefined:
              inputs_dict = {}
              for descriptor in inputs_class:
                  if hasattr(self, descriptor.name):
vellum/workflows/nodes/core/map_node/node.py
@@ -66,6 +66,11 @@ class MapNode(BaseAdornmentNode[StateType], Generic[StateType, MapNodeItemType])
          for output_descripter in self.subworkflow.Outputs:
              mapped_items[output_descripter.name] = [None] * len(self.items)

+         if not self.items:
+             for output_name, output_list in mapped_items.items():
+                 yield BaseOutput(name=output_name, value=output_list)
+             return
+
          self._event_queue: Queue[Tuple[int, WorkflowEvent]] = Queue()
          self._concurrency_queue: Queue[Thread] = Queue()
          fulfilled_iterations: List[bool] = []
vellum/workflows/nodes/core/map_node/tests/test_node.py
@@ -63,3 +63,25 @@ def test_map_node__use_parallelism():
      # THEN the node should have ran in parallel
      run_time = (end_ts - start_ts) / 10**9
      assert run_time < 0.2
+
+
+ def test_map_node__empty_list():
+     # GIVEN a map node that is configured to use the parent's inputs and state
+     @MapNode.wrap(items=[])
+     class TestNode(BaseNode):
+         item = MapNode.SubworkflowInputs.item
+
+         class Outputs(BaseOutputs):
+             value: int
+
+         def run(self) -> Outputs:
+             time.sleep(0.03)
+             return self.Outputs(value=self.item + 1)
+
+     # WHEN the node is run
+     node = TestNode()
+     outputs = list(node.run())
+
+     # THEN the node should return an empty output
+     fulfilled_output = outputs[-1]
+     assert fulfilled_output == BaseOutput(name="value", value=[])
vellum/workflows/nodes/displayable/bases/api_node/node.py
@@ -3,6 +3,7 @@ from typing import Any, Dict, Generic, Optional, Union
  from requests import Request, RequestException, Session
  from requests.exceptions import JSONDecodeError

+ from vellum.client import ApiError
  from vellum.client.types.vellum_secret import VellumSecret as ClientVellumSecret
  from vellum.workflows.constants import APIRequestMethod
  from vellum.workflows.errors.types import WorkflowErrorCode
@@ -83,9 +84,13 @@ class BaseAPINode(BaseNode, Generic[StateType]):

      def _vellum_execute_api(self, bearer_token, data, headers, method, url):
          client_vellum_secret = ClientVellumSecret(name=bearer_token.name) if bearer_token else None
-         vellum_response = self._context.vellum_client.execute_api(
-             url=url, method=method.value, body=data, headers=headers, bearer_token=client_vellum_secret
-         )
+         try:
+             vellum_response = self._context.vellum_client.execute_api(
+                 url=url, method=method.value, body=data, headers=headers, bearer_token=client_vellum_secret
+             )
+         except ApiError as e:
+             NodeException(f"Failed to prepare HTTP request: {e}", code=WorkflowErrorCode.NODE_EXECUTION)
+
          return self.Outputs(
              json=vellum_response.json_,
              headers={header: value for header, value in vellum_response.headers.items()},
vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py
@@ -40,16 +40,14 @@ class BasePromptNode(BaseNode, Generic[StateType]):
          try:
              prompt_event_stream = self._get_prompt_event_stream()
          except ApiError as e:
-             if e.status_code and e.status_code >= 400 and e.status_code < 500 and isinstance(e.body, dict):
-                 raise NodeException(
-                     message=e.body.get("detail", "Failed to execute prompt"),
-                     code=WorkflowErrorCode.INVALID_INPUTS,
-                 ) from e
+             self._handle_api_error(e)

-             raise NodeException(
-                 message="Failed to execute prompt",
-                 code=WorkflowErrorCode.INTERNAL_ERROR,
-             ) from e
+         # We don't use the INITIATED event anyway, so we can just skip it
+         # and use the exception handling to catch other api level errors
+         try:
+             next(prompt_event_stream)
+         except ApiError as e:
+             self._handle_api_error(e)

          outputs: Optional[List[PromptOutput]] = None
          for event in prompt_event_stream:
@@ -65,3 +63,15 @@ class BasePromptNode(BaseNode, Generic[StateType]):
                  raise NodeException.of(workflow_error)

          return outputs
+
+     def _handle_api_error(self, e: ApiError):
+         if e.status_code and e.status_code >= 400 and e.status_code < 500 and isinstance(e.body, dict):
+             raise NodeException(
+                 message=e.body.get("detail", "Failed to execute prompt"),
+                 code=WorkflowErrorCode.INVALID_INPUTS,
+             ) from e
+
+         raise NodeException(
+             message="Failed to execute prompt",
+             code=WorkflowErrorCode.INTERNAL_ERROR,
+         ) from e
vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py
@@ -1,8 +1,8 @@
  import pytest
  import os
- from typing import Any
+ from typing import Any, Union

- from vellum import CodeExecutorResponse, NumberVellumValue, StringInput
+ from vellum import CodeExecutorResponse, NumberVellumValue, StringInput, StringVellumValue
  from vellum.client.types.code_execution_package import CodeExecutionPackage
  from vellum.client.types.code_executor_secret_input import CodeExecutorSecretInput
  from vellum.client.types.function_call import FunctionCall
@@ -493,3 +493,69 @@ def main(word: str) -> dict:
          },
          "log": "",
      }
+
+
+ def test_run_node__array_input_with_vellum_values(vellum_client):
+     """Confirm that CodeExecutionNodes can handle arrays containing VellumValue objects."""
+
+     # GIVEN a node that subclasses CodeExecutionNode that processes an array of VellumValues
+     class State(BaseState):
+         pass
+
+     class ExampleCodeExecutionNode(CodeExecutionNode[State, str]):
+         code = """\
+ from typing import List, Dict
+ def main(arg1: List[Dict]) -> str:
+     return arg1[0]["value"] + " " + arg1[1]["value"]
+ """
+         runtime = "PYTHON_3_11_6"
+
+         code_inputs = {
+             "arg1": [
+                 StringVellumValue(type="STRING", value="Hello", name="First"),
+                 StringVellumValue(type="STRING", value="World", name="Second"),
+             ],
+         }
+
+     # WHEN we run the node
+     node = ExampleCodeExecutionNode(state=State())
+     outputs = node.run()
+
+     # THEN the node should successfully concatenate the values
+     assert outputs == {"result": "Hello World", "log": ""}
+
+     # AND we should not have invoked the Code via Vellum since it's running inline
+     vellum_client.execute_code.assert_not_called()
+
+
+ def test_run_node__union_output_type(vellum_client):
+     """Confirm that CodeExecutionNodes can handle Union output types."""
+
+     # GIVEN a node that subclasses CodeExecutionNode that returns a Union type
+     class State(BaseState):
+         pass
+
+     class ExampleCodeExecutionNode(CodeExecutionNode[State, Union[float, int]]):
+         code = """\
+ from typing import List, Dict
+ def main(arg1: List[Dict]) -> float:
+     return arg1[0]["value"] + arg1[1]["value"]
+ """
+         runtime = "PYTHON_3_11_6"
+
+         code_inputs = {
+             "arg1": [
+                 NumberVellumValue(type="NUMBER", value=1.0, name="First"),
+                 NumberVellumValue(type="NUMBER", value=2.0, name="Second"),
+             ],
+         }
+
+     # WHEN we run the node
+     node = ExampleCodeExecutionNode(state=State())
+     outputs = node.run()
+
+     # THEN the node should successfully sum the values
+     assert outputs == {"result": 3.0, "log": ""}
+
+     # AND we should not have invoked the Code via Vellum since it's running inline
+     vellum_client.execute_code.assert_not_called()
vellum/workflows/nodes/displayable/code_execution_node/utils.py
@@ -1,10 +1,11 @@
  import io
  import os
  import re
- from typing import Any, List, Tuple, Union
+ from typing import Any, List, Tuple, Union, get_args, get_origin

  from pydantic import BaseModel, ValidationError

+ from vellum import VellumValue
  from vellum.client.types.code_executor_input import CodeExecutorInput
  from vellum.workflows.errors.types import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
@@ -74,8 +75,25 @@ def run_code_inline(
  ) -> Tuple[str, Any]:
      log_buffer = io.StringIO()

+     VELLUM_TYPES = get_args(VellumValue)
+
+     def wrap_value(value):
+         if isinstance(value, list):
+             return ListWrapper(
+                 [
+                     # Convert VellumValue to dict with its fields
+                     (
+                         item.model_dump()
+                         if isinstance(item, VELLUM_TYPES)
+                         else _clean_for_dict_wrapper(item) if isinstance(item, (dict, list)) else item
+                     )
+                     for item in value
+                 ]
+             )
+         return _clean_for_dict_wrapper(value)
+
      exec_globals = {
-         "__arg__inputs": {input_value.name: _clean_for_dict_wrapper(input_value.value) for input_value in input_values},
+         "__arg__inputs": {input_value.name: wrap_value(input_value.value) for input_value in input_values},
          "__arg__out": None,
          "print": lambda *args, **kwargs: log_buffer.write(f"{' '.join(args)}\n"),
      }
@@ -92,7 +110,14 @@ __arg__out = main({", ".join(run_args)})
      result = exec_globals["__arg__out"]

      if output_type != Any:
-         if issubclass(output_type, BaseModel) and not isinstance(result, output_type):
+         if get_origin(output_type) is Union:
+             allowed_types = get_args(output_type)
+             if not isinstance(result, allowed_types):
+                 raise NodeException(
+                     code=WorkflowErrorCode.INVALID_OUTPUTS,
+                     message=f"Expected output to be in types {allowed_types}, but received '{type(result).__name__}'",
+                 )
+         elif issubclass(output_type, BaseModel) and not isinstance(result, output_type):
              try:
                  result = output_type.model_validate(result)
              except ValidationError as e:
@@ -100,12 +125,10 @@ __arg__out = main({", ".join(run_args)})
                      code=WorkflowErrorCode.INVALID_OUTPUTS,
                      message=re.sub(r"\s+For further information visit [^\s]+", "", str(e)),
                  ) from e
-
-         if not isinstance(result, output_type):
+         elif not isinstance(result, output_type):
              raise NodeException(
                  code=WorkflowErrorCode.INVALID_OUTPUTS,
-                 message=f"Expected an output of type '{output_type.__name__}',"
-                 f" but received '{result.__class__.__name__}'",
+                 message=f"Expected an output of type '{output_type.__name__}', but received '{type(result).__name__}'",
              )

      return logs, result
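
For reference, the Union handling added above follows the standard typing-introspection pattern (get_origin/get_args); a minimal standalone sketch, with hypothetical names that are not part of the package:

    from typing import Any, Union, get_args, get_origin

    def check_output(result: Any, output_type: Any) -> None:
        # For Union[...] annotations, get_origin() returns typing.Union and
        # get_args() returns the member types, which isinstance() accepts as a tuple.
        if get_origin(output_type) is Union:
            if not isinstance(result, get_args(output_type)):
                raise TypeError(f"Expected one of {get_args(output_type)}, got '{type(result).__name__}'")
        elif not isinstance(result, output_type):
            raise TypeError(f"Expected '{output_type.__name__}', got '{type(result).__name__}'")

    check_output(3.0, Union[float, int])      # passes
    # check_output("3", Union[float, int])    # would raise TypeError
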
vellum/workflows/outputs/base.py
@@ -4,7 +4,7 @@ from typing_extensions import dataclass_transform
  from pydantic import GetCoreSchemaHandler
  from pydantic_core import core_schema

- from vellum.workflows.constants import UNDEF
+ from vellum.workflows.constants import undefined
  from vellum.workflows.descriptors.base import BaseDescriptor
  from vellum.workflows.errors.types import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
@@ -19,17 +19,17 @@ _Accumulated = TypeVar("_Accumulated")


  class BaseOutput(Generic[_Delta, _Accumulated]):
-     _value: Union[_Accumulated, Type[UNDEF]]
-     _delta: Union[_Delta, Type[UNDEF]]
+     _value: Union[_Accumulated, Type[undefined]]
+     _delta: Union[_Delta, Type[undefined]]
      _name: str

      def __init__(
          self,
          name: str,
-         value: Union[_Accumulated, Type[UNDEF]] = UNDEF,
-         delta: Union[_Delta, Type[UNDEF]] = UNDEF,
+         value: Union[_Accumulated, Type[undefined]] = undefined,
+         delta: Union[_Delta, Type[undefined]] = undefined,
      ) -> None:
-         if value is not UNDEF and delta is not UNDEF:
+         if value is not undefined and delta is not undefined:
              raise ValueError("Cannot set both value and delta")

          self._name = name
@@ -37,24 +37,24 @@ class BaseOutput(Generic[_Delta, _Accumulated]):
          self._delta = delta

      @property
-     def delta(self) -> Union[_Delta, Type[UNDEF]]:
+     def delta(self) -> Union[_Delta, Type[undefined]]:
          return self._delta

      @property
-     def value(self) -> Union[_Accumulated, Type[UNDEF]]:
+     def value(self) -> Union[_Accumulated, Type[undefined]]:
          return self._value

      @property
      def is_initiated(self) -> bool:
-         return self._delta is UNDEF and self._value is UNDEF
+         return self._delta is undefined and self._value is undefined

      @property
      def is_streaming(self) -> bool:
-         return self._delta is not UNDEF and self._value is UNDEF
+         return self._delta is not undefined and self._value is undefined

      @property
      def is_fulfilled(self) -> bool:
-         return self._delta is UNDEF and self._value is not UNDEF
+         return self._delta is undefined and self._value is not undefined

      @property
      def name(self) -> str:
@@ -71,18 +71,18 @@ class BaseOutput(Generic[_Delta, _Accumulated]):
              "name": self.name,
          }

-         if self.value is not UNDEF:
+         if self.value is not undefined:
              data["value"] = self.value

-         if self.delta is not UNDEF:
+         if self.delta is not undefined:
              data["delta"] = self.delta

          return data

      def __repr__(self) -> str:
-         if self.value is not UNDEF:
+         if self.value is not undefined:
              return f"{self.__class__.__name__}({self.name}={self.value})"
-         elif self.delta is not UNDEF:
+         elif self.delta is not undefined:
              return f"{self.__class__.__name__}({self.name}={self.delta})"
          else:
              return f"{self.__class__.__name__}(name='{self.name}')"
@@ -144,7 +144,7 @@ class _BaseOutputsMeta(type):
          # We first try to resolve the instance that this class attribute name is mapped to. If it's not found,
          # we iterate through its inheritance hierarchy to find the first base class that has this attribute
          # and use its mapping.
-         instance = vars(cls).get(name, UNDEF)
+         instance = vars(cls).get(name, undefined)
          if not instance:
              for base in cls.__mro__[1:]:
                  if hasattr(base, name):
@@ -204,7 +204,9 @@ class BaseOutputs(metaclass=_BaseOutputsMeta):
          if not isinstance(other, dict):
              return super().__eq__(other)

-         outputs = {name: value for name, value in vars(self).items() if not name.startswith("_") and value is not UNDEF}
+         outputs = {
+             name: value for name, value in vars(self).items() if not name.startswith("_") and value is not undefined
+         }
          return outputs == other

      def __repr__(self) -> str:
@@ -213,9 +215,9 @@ class BaseOutputs(metaclass=_BaseOutputsMeta):

      def __iter__(self) -> Iterator[Tuple[OutputReference, Any]]:
          for output_descriptor in self.__class__:
-             output_value = getattr(self, output_descriptor.name, UNDEF)
+             output_value = getattr(self, output_descriptor.name, undefined)
              if isinstance(output_value, BaseDescriptor):
-                 output_value = UNDEF
+                 output_value = undefined

              yield (output_descriptor, output_value)

vellum/workflows/ports/port.py
@@ -4,7 +4,10 @@ from pydantic import GetCoreSchemaHandler
  from pydantic_core import core_schema

  from vellum.workflows.descriptors.base import BaseDescriptor
+ from vellum.workflows.descriptors.exceptions import InvalidExpressionException
  from vellum.workflows.edges.edge import Edge
+ from vellum.workflows.errors.types import WorkflowErrorCode
+ from vellum.workflows.exceptions import NodeException
  from vellum.workflows.graph import Graph, GraphTarget
  from vellum.workflows.state.base import BaseState
  from vellum.workflows.types.core import ConditionType
@@ -82,11 +85,17 @@ class Port:
          return Port(condition_type=ConditionType.ELSE, fork_state=fork_state)

      def resolve_condition(self, state: BaseState) -> bool:
-         if self._condition is None:
-             return False
-
-         value = self._condition.resolve(state)
-         return bool(value)
+         try:
+             if self._condition is None:
+                 return False
+
+             value = self._condition.resolve(state)
+             return bool(value)
+         except InvalidExpressionException as e:
+             raise NodeException(
+                 message=f"Failed to resolve condition for port `{self.name}`: {e}",
+                 code=WorkflowErrorCode.INVALID_INPUTS,
+             ) from e

      def serialize(self) -> dict:
          return {
vellum/workflows/references/external_input.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Any, Generic, Optional, Tuple, Type, TypeVar,
  from pydantic import GetCoreSchemaHandler
  from pydantic_core import core_schema

- from vellum.workflows.constants import UNDEF
+ from vellum.workflows.constants import undefined
  from vellum.workflows.descriptors.base import BaseDescriptor
  from vellum.workflows.errors.types import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
@@ -34,7 +34,7 @@ class ExternalInputReference(BaseDescriptor[_InputType], Generic[_InputType]):

      def resolve(self, state: "BaseState") -> _InputType:
          external_input = state.meta.external_inputs.get(self)
-         if external_input is not UNDEF:
+         if external_input is not undefined:
              return cast(_InputType, external_input)

          if state.meta.parent:
vellum/workflows/references/lazy.py
@@ -2,7 +2,7 @@ import ast
  import inspect
  from typing import TYPE_CHECKING, Callable, Generic, TypeVar, Union, get_args

- from vellum.workflows.constants import UNDEF
+ from vellum.workflows.constants import undefined
  from vellum.workflows.descriptors.base import BaseDescriptor

  if TYPE_CHECKING:
@@ -37,7 +37,7 @@ class LazyReference(BaseDescriptor[_T], Generic[_T]):

            # Fix typing surrounding the return value of node outputs/output descriptors
            # https://app.shortcut.com/vellum/story/4783
-           return UNDEF  # type: ignore[return-value]
+           return undefined  # type: ignore[return-value]

          return resolve_value(self._get(), state)

vellum/workflows/references/output.py
@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any, Generator, Generic, Optional, Tuple, Type
  from pydantic import GetCoreSchemaHandler
  from pydantic_core import core_schema

- from vellum.workflows.constants import UNDEF
+ from vellum.workflows.constants import undefined
  from vellum.workflows.descriptors.base import BaseDescriptor

  if TYPE_CHECKING:
@@ -32,13 +32,13 @@ class OutputReference(BaseDescriptor[_OutputType], Generic[_OutputType]):
          return self._outputs_class

      def resolve(self, state: "BaseState") -> _OutputType:
-         node_output = state.meta.node_outputs.get(self, UNDEF)
+         node_output = state.meta.node_outputs.get(self, undefined)
          if isinstance(node_output, Queue):
              # Fix typing surrounding the return value of node outputs
              # https://app.shortcut.com/vellum/story/4783
              return self._as_generator(node_output)  # type: ignore[return-value]

-         if node_output is not UNDEF:
+         if node_output is not undefined:
              return cast(_OutputType, node_output)

          if state.meta.parent:
@@ -46,13 +46,13 @@ class OutputReference(BaseDescriptor[_OutputType], Generic[_OutputType]):

          # Fix typing surrounding the return value of node outputs
          # https://app.shortcut.com/vellum/story/4783
-         return cast(Type[UNDEF], node_output)  # type: ignore[return-value]
+         return cast(Type[undefined], node_output)  # type: ignore[return-value]

-     def _as_generator(self, node_output: Queue) -> Generator[_OutputType, None, Type[UNDEF]]:
+     def _as_generator(self, node_output: Queue) -> Generator[_OutputType, None, Type[undefined]]:
          while True:
              item = node_output.get()
-             if item is UNDEF:
-                 return UNDEF
+             if item is undefined:
+                 return undefined
              yield cast(_OutputType, item)

      def __eq__(self, other: object) -> bool:
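
Note on the UNDEF → undefined rename that runs through these hunks: the usages (`value is undefined`, defaults of `= undefined`, annotations of `Type[undefined]`) imply a class object used as a singleton sentinel. Its actual definition lives in vellum/workflows/constants.py, which is not shown here; a minimal sketch of the pattern under that assumption, with a hypothetical helper function:

    from typing import Type, Union

    class undefined:
        """Sentinel: the class object itself marks 'no value provided'."""

    def describe(value: Union[str, Type[undefined]] = undefined) -> str:
        # Identity check against the class object, mirroring `value is undefined` in the diff.
        if value is undefined:
            return "no value"
        return f"value={value!r}"

    assert describe() == "no value"
    assert describe("hi") == "value='hi'"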