vellum-ai 0.11.9__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. vellum/client/core/client_wrapper.py +1 -1
  2. vellum/workflows/descriptors/base.py +2 -2
  3. vellum/workflows/descriptors/tests/test_utils.py +4 -4
  4. vellum/workflows/errors/__init__.py +3 -3
  5. vellum/workflows/errors/types.py +46 -3
  6. vellum/workflows/events/node.py +3 -3
  7. vellum/workflows/events/tests/test_event.py +3 -3
  8. vellum/workflows/events/workflow.py +3 -3
  9. vellum/workflows/exceptions.py +8 -4
  10. vellum/workflows/nodes/bases/base.py +9 -2
  11. vellum/workflows/nodes/bases/tests/test_base_node.py +13 -0
  12. vellum/workflows/nodes/core/error_node/node.py +9 -5
  13. vellum/workflows/nodes/core/inline_subworkflow_node/node.py +3 -13
  14. vellum/workflows/nodes/core/map_node/node.py +2 -2
  15. vellum/workflows/nodes/core/retry_node/node.py +5 -5
  16. vellum/workflows/nodes/core/retry_node/tests/test_node.py +6 -6
  17. vellum/workflows/nodes/core/templating_node/node.py +2 -2
  18. vellum/workflows/nodes/core/try_node/node.py +7 -7
  19. vellum/workflows/nodes/core/try_node/tests/test_node.py +9 -7
  20. vellum/workflows/nodes/displayable/bases/api_node/node.py +3 -3
  21. vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +4 -12
  22. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +2 -2
  23. vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py +2 -2
  24. vellum/workflows/nodes/displayable/bases/search_node.py +3 -3
  25. vellum/workflows/nodes/displayable/code_execution_node/node.py +21 -5
  26. vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py +141 -0
  27. vellum/workflows/nodes/displayable/guardrail_node/node.py +3 -3
  28. vellum/workflows/nodes/displayable/inline_prompt_node/node.py +3 -3
  29. vellum/workflows/nodes/displayable/prompt_deployment_node/node.py +3 -3
  30. vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +7 -14
  31. vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py +4 -4
  32. vellum/workflows/nodes/utils.py +5 -9
  33. vellum/workflows/references/external_input.py +2 -2
  34. vellum/workflows/references/node.py +2 -2
  35. vellum/workflows/references/state_value.py +2 -2
  36. vellum/workflows/references/workflow_input.py +2 -2
  37. vellum/workflows/runner/runner.py +15 -15
  38. {vellum_ee/workflows/display → vellum/workflows}/utils/tests/test_uuids.py +1 -1
  39. vellum/workflows/workflows/base.py +7 -7
  40. {vellum_ai-0.11.9.dist-info → vellum_ai-0.12.0.dist-info}/METADATA +1 -1
  41. {vellum_ai-0.11.9.dist-info → vellum_ai-0.12.0.dist-info}/RECORD +61 -62
  42. {vellum_ai-0.11.9.dist-info → vellum_ai-0.12.0.dist-info}/WHEEL +1 -1
  43. vellum_ee/workflows/display/nodes/base_node_display.py +50 -21
  44. vellum_ee/workflows/display/nodes/base_node_vellum_display.py +10 -1
  45. vellum_ee/workflows/display/nodes/get_node_display_class.py +10 -1
  46. vellum_ee/workflows/display/nodes/tests/test_base_node_display.py +5 -4
  47. vellum_ee/workflows/display/nodes/vellum/code_execution_node.py +13 -3
  48. vellum_ee/workflows/display/nodes/vellum/conditional_node.py +1 -1
  49. vellum_ee/workflows/display/nodes/vellum/final_output_node.py +1 -1
  50. vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +1 -1
  51. vellum_ee/workflows/display/nodes/vellum/merge_node.py +4 -4
  52. vellum_ee/workflows/display/nodes/vellum/search_node.py +1 -1
  53. vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py +5 -1
  54. vellum_ee/workflows/display/nodes/vellum/try_node.py +12 -6
  55. vellum_ee/workflows/display/nodes/vellum/utils.py +1 -1
  56. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_code_execution_node_serialization.py +269 -85
  57. vellum_ee/workflows/display/workflows/base_workflow_display.py +2 -2
  58. vellum_ee/workflows/display/workflows/vellum_workflow_display.py +2 -6
  59. vellum_ee/workflows/display/utils/tests/__init__.py +0 -0
  60. {vellum_ee/workflows/display → vellum/workflows}/utils/uuids.py +0 -0
  61. {vellum_ai-0.11.9.dist-info → vellum_ai-0.12.0.dist-info}/LICENSE +0 -0
  62. {vellum_ai-0.11.9.dist-info → vellum_ai-0.12.0.dist-info}/entry_points.txt +0 -0
vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py

@@ -3,7 +3,7 @@ from typing import ClassVar, Generator, Generic, Iterator, List, Optional, Union
 
 from vellum import AdHocExecutePromptEvent, ExecutePromptEvent, PromptOutput
 from vellum.core import RequestOptions
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors.types import WorkflowErrorCode, vellum_error_to_workflow_error
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
@@ -29,7 +29,7 @@ class BasePromptNode(BaseNode, Generic[StateType]):
         if outputs is None:
             raise NodeException(
                 message="Expected to receive outputs from Prompt",
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=WorkflowErrorCode.INTERNAL_ERROR,
             )
 
     def _process_prompt_event_stream(self) -> Generator[BaseOutput, None, Optional[List[PromptOutput]]]:
@@ -45,15 +45,7 @@ class BasePromptNode(BaseNode, Generic[StateType]):
                 outputs = event.outputs
                 yield BaseOutput(name="results", value=event.outputs)
             elif event.state == "REJECTED":
-                if event.error.code in VellumErrorCode._value2member_map_:
-                    raise NodeException(
-                        message=event.error.message,
-                        code=VellumErrorCode(event.error.code),
-                    )
-                else:
-                    raise NodeException(
-                        message=event.error.message,
-                        code=VellumErrorCode.INTERNAL_ERROR,
-                    )
+                workflow_error = vellum_error_to_workflow_error(event.error)
+                raise NodeException.of(workflow_error)
 
         return outputs
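
The change above sets the pattern for the rest of this release: the VellumErrorCode enum is renamed to WorkflowErrorCode, and rejected events are converted through the new vellum_error_to_workflow_error helper instead of being matched against the enum inline. A minimal migration sketch for downstream code, using only names that appear in these hunks (the fail_with_internal_error wrapper is illustrative only):

# 0.12.0: the enum import moves from VellumErrorCode to WorkflowErrorCode;
# the call site is otherwise unchanged from 0.11.9.
from vellum.workflows.errors.types import WorkflowErrorCode
from vellum.workflows.exceptions import NodeException


def fail_with_internal_error() -> None:
    # Previously: code=VellumErrorCode.INTERNAL_ERROR
    raise NodeException(
        message="Expected to receive outputs from Prompt",
        code=WorkflowErrorCode.INTERNAL_ERROR,
    )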
vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py

@@ -17,7 +17,7 @@ from vellum (
 from vellum.client import RequestOptions
 from vellum.workflows.constants import OMIT
 from vellum.workflows.context import get_parent_context
-from vellum.workflows.errors import VellumErrorCode
+from vellum.workflows.errors import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.displayable.bases.base_prompt_node import BasePromptNode
 from vellum.workflows.nodes.displayable.bases.inline_prompt_node.constants import DEFAULT_PROMPT_PARAMETERS
@@ -120,7 +120,7 @@ class BaseInlinePromptNode(BasePromptNode, Generic[StateType]):
             else:
                 raise NodeException(
                     message=f"Unrecognized input type for input '{input_name}': {input_value.__class__}",
-                    code=VellumErrorCode.INVALID_INPUTS,
+                    code=WorkflowErrorCode.INVALID_INPUTS,
                 )
 
         return input_variables, input_values
vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py

@@ -14,7 +14,7 @@ from vellum (
 from vellum.client import RequestOptions
 from vellum.workflows.constants import LATEST_RELEASE_TAG, OMIT
 from vellum.workflows.context import get_parent_context
-from vellum.workflows.errors import VellumErrorCode
+from vellum.workflows.errors import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.displayable.bases.base_prompt_node import BasePromptNode
 from vellum.workflows.types.generics import StateType
@@ -101,7 +101,7 @@ class BasePromptDeploymentNode(BasePromptNode, Generic[StateType]):
             else:
                 raise NodeException(
                     message=f"Unrecognized input type for input '{input_name}': {input_value.__class__}",
-                    code=VellumErrorCode.INVALID_INPUTS,
+                    code=WorkflowErrorCode.INVALID_INPUTS,
                 )
 
         return compiled_inputs
vellum/workflows/nodes/displayable/bases/search_node.py

@@ -12,7 +12,7 @@ from vellum (
     SearchWeightsRequest,
 )
 from vellum.core import ApiError, RequestOptions
-from vellum.workflows.errors import VellumErrorCode
+from vellum.workflows.errors import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.outputs import BaseOutputs
@@ -83,12 +83,12 @@ class BaseSearchNode(BaseNode[StateType], Generic[StateType]):
         except NotFoundError:
             raise NodeException(
                 message=f"Document Index '{self.document_index}' not found",
-                code=VellumErrorCode.INVALID_INPUTS,
+                code=WorkflowErrorCode.INVALID_INPUTS,
             )
         except ApiError:
             raise NodeException(
                 message=f"An error occurred while searching against Document Index '{self.document_index}'",  # noqa: E501
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=WorkflowErrorCode.INTERNAL_ERROR,
             )
 
     def run(self) -> Outputs:
vellum/workflows/nodes/displayable/code_execution_node/node.py

@@ -20,7 +20,7 @@ from vellum (
     VellumValue,
 )
 from vellum.core import RequestOptions
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.bases.base import BaseNodeMeta
@@ -73,7 +73,8 @@ class CodeExecutionNode(BaseNode[StateType], Generic[StateType, _OutputType], me
     request_options: Optional[RequestOptions] = None - The request options to use for the custom script.
     """
 
-    filepath: ClassVar[str]
+    filepath: ClassVar[Optional[str]] = None
+    code: ClassVar[Optional[str]] = None
 
     code_inputs: ClassVar[EntityInputsInterface]
     runtime: CodeExecutionRuntime = "PYTHON_3_11_6"
@@ -101,7 +102,7 @@ class CodeExecutionNode(BaseNode[StateType], Generic[StateType, _OutputType], me
 
         if code_execution.output.type != expected_output_type:
             raise NodeException(
-                code=VellumErrorCode.INVALID_OUTPUTS,
+                code=WorkflowErrorCode.INVALID_OUTPUTS,
                 message=f"Expected an output of type '{expected_output_type}', received '{code_execution.output.type}'",
            )
 
@@ -184,18 +185,33 @@ class CodeExecutionNode(BaseNode[StateType], Generic[StateType, _OutputType], me
             else:
                 raise NodeException(
                     message=f"Unrecognized input type for input '{input_name}'",
-                    code=VellumErrorCode.INVALID_INPUTS,
+                    code=WorkflowErrorCode.INVALID_INPUTS,
                 )
 
         return compiled_inputs
 
     def _resolve_code(self) -> str:
+        if self.code and self.filepath:
+            raise NodeException(
+                message="Cannot specify both `code` and `filepath` for a CodeExecutionNode",
+                code=WorkflowErrorCode.INVALID_INPUTS,
+            )
+
+        if self.code:
+            return self.code
+
+        if not self.filepath:
+            raise NodeException(
+                message="Must specify either `code` or `filepath` for a CodeExecutionNode",
+                code=WorkflowErrorCode.INVALID_INPUTS,
+            )
+
         root = inspect.getfile(self.__class__)
         code = read_file_from_path(node_filepath=root, script_filepath=self.filepath)
         if not code:
             raise NodeException(
                 message=f"Filepath '{self.filepath}' does not exist",
-                code=VellumErrorCode.INVALID_INPUTS,
+                code=WorkflowErrorCode.INVALID_INPUTS,
             )
 
         return code
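
With both attributes now optional, _resolve_code enforces that exactly one of `code` or `filepath` is set. A minimal sketch of a subclass that inlines its script through the new `code` attribute, mirroring the tests added below; the State and Inputs classes and the InlineScriptNode name are illustrative, assumed to be defined as in those tests:

from vellum.workflows.nodes.displayable.code_execution_node import CodeExecutionNode


class InlineScriptNode(CodeExecutionNode[State, int]):
    # Exactly one of `code` or `filepath` may be set; setting both, or neither,
    # now raises a NodeException with WorkflowErrorCode.INVALID_INPUTS.
    code = """\
def main(word: str) -> int:
    return len(word)
"""
    runtime = "PYTHON_3_11_6"
    code_inputs = {
        "word": Inputs.word,
    }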
vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py

@@ -1,6 +1,8 @@
+import pytest
 import os
 
 from vellum import CodeExecutorResponse, NumberVellumValue, StringInput
+from vellum.workflows.exceptions import NodeException
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.displayable.code_execution_node import CodeExecutionNode
 from vellum.workflows.references.vellum_secret import VellumSecretReference
@@ -62,6 +64,145 @@ def main(word: str) -> int:
     )
 
 
+def test_run_workflow__code_attribute(vellum_client):
+    """Confirm that CodeExecutionNodes can use the `code` attribute to specify the code to execute."""
+
+    # GIVEN a node that subclasses CodeExecutionNode
+    class Inputs(BaseInputs):
+        word: str
+
+    class State(BaseState):
+        pass
+
+    class ExampleCodeExecutionNode(CodeExecutionNode[State, int]):
+        code = """\
+def main(word: str) -> int:
+    print(word)  # noqa: T201
+    return len(word)
+"""
+        runtime = "PYTHON_3_11_6"
+
+        code_inputs = {
+            "word": Inputs.word,
+        }
+
+    # AND we know what the Code Execution Node will respond with
+    mock_code_execution = CodeExecutorResponse(
+        log="hello",
+        output=NumberVellumValue(value=5),
+    )
+    vellum_client.execute_code.return_value = mock_code_execution
+
+    # WHEN we run the node
+    node = ExampleCodeExecutionNode(
+        state=State(
+            meta=StateMeta(workflow_inputs=Inputs(word="hello")),
+        )
+    )
+    outputs = node.run()
+
+    # THEN the node should have produced the outputs we expect
+    assert outputs == {"result": 5, "log": "hello"}
+
+    # AND we should have invoked the Code with the expected inputs
+    vellum_client.execute_code.assert_called_once_with(
+        input_values=[
+            StringInput(name="word", value="hello"),
+        ],
+        code="""\
+def main(word: str) -> int:
+    print(word)  # noqa: T201
+    return len(word)
+""",
+        runtime="PYTHON_3_11_6",
+        output_type="NUMBER",
+        packages=[],
+        request_options=None,
+    )
+
+
+def test_run_workflow__code_and_filepath_defined(vellum_client):
+    """Confirm that CodeExecutionNodes raise an error if both `code` and `filepath` are defined."""
+
+    # GIVEN a node that subclasses CodeExecutionNode
+    class Inputs(BaseInputs):
+        word: str
+
+    class State(BaseState):
+        pass
+
+    fixture = os.path.abspath(os.path.join(__file__, "../fixtures/main.py"))
+
+    class ExampleCodeExecutionNode(CodeExecutionNode[State, int]):
+        filepath = fixture
+        code = """\
+def main(word: str) -> int:
+    print(word)  # noqa: T201
+    return len(word)
+"""
+        runtime = "PYTHON_3_11_6"
+
+        code_inputs = {
+            "word": Inputs.word,
+        }
+
+    # AND we know what the Code Execution Node will respond with
+    mock_code_execution = CodeExecutorResponse(
+        log="hello",
+        output=NumberVellumValue(value=5),
+    )
+    vellum_client.execute_code.return_value = mock_code_execution
+
+    # WHEN we run the node
+    node = ExampleCodeExecutionNode(
+        state=State(
+            meta=StateMeta(workflow_inputs=Inputs(word="hello")),
+        )
+    )
+    with pytest.raises(NodeException) as exc_info:
+        node.run()
+
+    # THEN the node should have produced the exception we expected
+    assert exc_info.value.message == "Cannot specify both `code` and `filepath` for a CodeExecutionNode"
+
+
+def test_run_workflow__code_and_filepath_not_defined(vellum_client):
+    """Confirm that CodeExecutionNodes raise an error if neither `code` nor `filepath` are defined."""
+
+    # GIVEN a node that subclasses CodeExecutionNode
+    class Inputs(BaseInputs):
+        word: str
+
+    class State(BaseState):
+        pass
+
+    class ExampleCodeExecutionNode(CodeExecutionNode[State, int]):
+        runtime = "PYTHON_3_11_6"
+
+        code_inputs = {
+            "word": Inputs.word,
+        }
+
+    # AND we know what the Code Execution Node will respond with
+    mock_code_execution = CodeExecutorResponse(
+        log="hello",
+        output=NumberVellumValue(value=5),
+    )
+    vellum_client.execute_code.return_value = mock_code_execution
+
+    # WHEN we run the node
+    node = ExampleCodeExecutionNode(
+        state=State(
+            meta=StateMeta(workflow_inputs=Inputs(word="hello")),
+        )
+    )
+    with pytest.raises(NodeException) as exc_info:
+        node.run()
+
+    # THEN the node should have produced the exception we expected
+    assert exc_info.value.message == "Must specify either `code` or `filepath` for a CodeExecutionNode"
+
+
 def test_run_workflow__vellum_secret(vellum_client):
     """Confirm that CodeExecutionNodes can use Vellum Secrets"""
 
vellum/workflows/nodes/displayable/guardrail_node/node.py

@@ -4,7 +4,7 @@ from typing import Any, ClassVar, Dict, Generic, List, Optional, Union, cast
 from vellum import ChatHistoryInput, ChatMessage, JsonInput, MetricDefinitionInput, NumberInput, StringInput
 from vellum.core import RequestOptions
 from vellum.workflows.constants import LATEST_RELEASE_TAG
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.outputs.base import BaseOutputs
@@ -46,7 +46,7 @@ class GuardrailNode(BaseNode[StateType], Generic[StateType]):
         if not isinstance(score, float):
             raise NodeException(
                 message="Metric execution must have one output named 'score' with type 'float'",
-                code=VellumErrorCode.INVALID_OUTPUTS,
+                code=WorkflowErrorCode.INVALID_OUTPUTS,
             )
 
         metric_outputs.pop("score")
@@ -90,7 +90,7 @@ class GuardrailNode(BaseNode[StateType], Generic[StateType]):
             else:
                 raise NodeException(
                     message=f"Unrecognized input type for input '{input_name}'",
-                    code=VellumErrorCode.INVALID_INPUTS,
+                    code=WorkflowErrorCode.INVALID_INPUTS,
                 )
 
         return compiled_inputs
vellum/workflows/nodes/displayable/inline_prompt_node/node.py

@@ -1,6 +1,6 @@
 from typing import Iterator
 
-from vellum.workflows.errors import VellumErrorCode
+from vellum.workflows.errors import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.displayable.bases import BaseInlinePromptNode as BaseInlinePromptNode
 from vellum.workflows.outputs import BaseOutput
@@ -34,7 +34,7 @@ class InlinePromptNode(BaseInlinePromptNode[StateType]):
         if not outputs:
             raise NodeException(
                 message="Expected to receive outputs from Prompt",
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=WorkflowErrorCode.INTERNAL_ERROR,
             )
 
         string_output = next((output for output in outputs if output.type == "STRING"), None)
@@ -43,7 +43,7 @@ class InlinePromptNode(BaseInlinePromptNode[StateType]):
             is_plural = len(output_types) > 1
             raise NodeException(
                 message=f"Expected to receive a non-null string output from Prompt. Only found outputs of type{'s' if is_plural else ''}: {', '.join(output_types)}",  # noqa: E501
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=WorkflowErrorCode.INTERNAL_ERROR,
             )
 
         yield BaseOutput(name="text", value=string_output.value)
vellum/workflows/nodes/displayable/prompt_deployment_node/node.py

@@ -1,6 +1,6 @@
 from typing import Iterator
 
-from vellum.workflows.errors import VellumErrorCode
+from vellum.workflows.errors import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.displayable.bases import BasePromptDeploymentNode as BasePromptDeploymentNode
 from vellum.workflows.outputs import BaseOutput
@@ -37,7 +37,7 @@ class PromptDeploymentNode(BasePromptDeploymentNode[StateType]):
         if not outputs:
             raise NodeException(
                 message="Expected to receive outputs from Prompt",
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=WorkflowErrorCode.INTERNAL_ERROR,
             )
 
         string_output = next((output for output in outputs if output.type == "STRING"), None)
@@ -46,7 +46,7 @@ class PromptDeploymentNode(BasePromptDeploymentNode[StateType]):
             is_plural = len(output_types) > 1
             raise NodeException(
                 message=f"Expected to receive a non-null string output from Prompt. Only found outputs of type{'s' if is_plural else ''}: {', '.join(output_types)}",  # noqa: E501
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=WorkflowErrorCode.INTERNAL_ERROR,
             )
 
         yield BaseOutput(name="text", value=string_output.value)
vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py

@@ -14,7 +14,8 @@ from vellum (
 from vellum.core import RequestOptions
 from vellum.workflows.constants import LATEST_RELEASE_TAG, OMIT
 from vellum.workflows.context import get_parent_context
-from vellum.workflows.errors import VellumErrorCode
+from vellum.workflows.errors import WorkflowErrorCode
+from vellum.workflows.errors.types import workflow_event_error_to_workflow_error
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases.base_subworkflow_node.node import BaseSubworkflowNode
 from vellum.workflows.outputs.base import BaseOutput
@@ -84,7 +85,7 @@ class SubworkflowDeploymentNode(BaseSubworkflowNode[StateType], Generic[StateTyp
             else:
                 raise NodeException(
                     message=f"Unrecognized input type for input '{input_name}'",
-                    code=VellumErrorCode.INVALID_INPUTS,
+                    code=WorkflowErrorCode.INVALID_INPUTS,
                 )
 
         return compiled_inputs
@@ -136,23 +137,15 @@ class SubworkflowDeploymentNode(BaseSubworkflowNode[StateType], Generic[StateTyp
             if not error:
                 raise NodeException(
                     message="Expected to receive an error from REJECTED event",
-                    code=VellumErrorCode.INTERNAL_ERROR,
-                )
-            elif error.code in VellumErrorCode._value2member_map_:
-                raise NodeException(
-                    message=error.message,
-                    code=VellumErrorCode(error.code),
-                )
-            else:
-                raise NodeException(
-                    message=error.message,
-                    code=VellumErrorCode.INTERNAL_ERROR,
+                    code=WorkflowErrorCode.INTERNAL_ERROR,
                 )
+            workflow_error = workflow_event_error_to_workflow_error(error)
+            raise NodeException.of(workflow_error)
 
         if outputs is None:
             raise NodeException(
                 message="Expected to receive outputs from Workflow Deployment",
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=WorkflowErrorCode.INTERNAL_ERROR,
             )
 
         # For any outputs somehow in our final fulfilled outputs array,
vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py

@@ -11,8 +11,8 @@ from vellum (
     StringVellumValue,
     VellumError,
 )
-from vellum.workflows.errors import VellumError as SdkVellumError
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors import WorkflowError as SdkVellumError
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.inputs import BaseInputs
 from vellum.workflows.nodes import InlinePromptNode
 from vellum.workflows.nodes.core.try_node.node import TryNode
@@ -104,7 +104,7 @@ def test_inline_text_prompt_node__catch_provider_error(vellum_adhoc_prompt_clien
     class State(BaseState):
         pass
 
-    @TryNode.wrap(on_error_code=VellumErrorCode.PROVIDER_ERROR)
+    @TryNode.wrap(on_error_code=WorkflowErrorCode.PROVIDER_ERROR)
     class MyInlinePromptNode(InlinePromptNode):
         ml_model = "gpt-4o"
         prompt_inputs = {}
@@ -143,7 +143,7 @@ def test_inline_text_prompt_node__catch_provider_error(vellum_adhoc_prompt_clien
             name="error",
             value=SdkVellumError(
                 message="OpenAI failed",
-                code=VellumErrorCode.PROVIDER_ERROR,
+                code=WorkflowErrorCode.PROVIDER_ERROR,
             ),
         )
         in outputs
vellum/workflows/nodes/utils.py

@@ -2,7 +2,6 @@ from functools import cache
 from typing import Type
 
 from vellum.workflows.nodes import BaseNode
-from vellum.workflows.references import NodeReference
 from vellum.workflows.types.generics import NodeType
 
 ADORNMENT_MODULE_NAME = "<adornment>"
@@ -10,20 +9,17 @@ ADORNMENT_MODULE_NAME = "<adornment>"
 
 @cache
 def get_wrapped_node(node: Type[NodeType]) -> Type[BaseNode]:
-    if hasattr(node, "subworkflow"):
-        subworkflow = node.subworkflow
-        if isinstance(subworkflow, NodeReference) and subworkflow.instance:
-            graph = subworkflow.instance.graph
-            if issubclass(graph, BaseNode):
-                return graph
+    wrapped_node = getattr(node, "__wrapped_node__", None)
+    if wrapped_node is None:
+        raise AttributeError("Wrapped node not found")
 
-    raise TypeError("Wrapped subworkflow contains more than one node")
+    return wrapped_node
 
 
 def has_wrapped_node(node: Type[NodeType]) -> bool:
     try:
         get_wrapped_node(node)
-    except TypeError:
+    except AttributeError:
         return False
 
     return True
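
get_wrapped_node no longer introspects the adornment's subworkflow graph; it reads a __wrapped_node__ attribute and signals its absence with AttributeError, which has_wrapped_node converts to False. A minimal usage sketch, assuming an adornment such as @TryNode.wrap(...) stores the original class on __wrapped_node__ (that producer is not shown in this diff, and MyAdornedNode is a hypothetical adorned node class):

from vellum.workflows.nodes.utils import get_wrapped_node, has_wrapped_node

if has_wrapped_node(MyAdornedNode):
    # Returns whatever class is stored on `__wrapped_node__`;
    # raises AttributeError when the attribute is missing.
    inner_node = get_wrapped_node(MyAdornedNode)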
vellum/workflows/references/external_input.py

@@ -5,7 +5,7 @@ from pydantic_core import core_schema
 
 from vellum.workflows.constants import UNDEF
 from vellum.workflows.descriptors.base import BaseDescriptor
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 
 if TYPE_CHECKING:
@@ -40,7 +40,7 @@ class ExternalInputReference(BaseDescriptor[_InputType], Generic[_InputType]):
         if state.meta.parent:
             return self.resolve(state.meta.parent)
 
-        raise NodeException(f"Missing required Node Input: {self._name}", code=VellumErrorCode.INVALID_INPUTS)
+        raise NodeException(f"Missing required Node Input: {self._name}", code=WorkflowErrorCode.INVALID_INPUTS)
 
     @classmethod
     def __get_pydantic_core_schema__(
vellum/workflows/references/node.py

@@ -4,7 +4,7 @@ from pydantic import GetCoreSchemaHandler
 from pydantic_core import core_schema
 
 from vellum.workflows.descriptors.base import BaseDescriptor
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 
 if TYPE_CHECKING:
@@ -30,7 +30,7 @@ class NodeReference(BaseDescriptor[_T]):
     def resolve(self, state: "BaseState") -> _T:
         raise NodeException(
             f"NodeDescriptors cannot be resolved during runtime. Got: {self._name}",
-            code=VellumErrorCode.INTERNAL_ERROR,
+            code=WorkflowErrorCode.INTERNAL_ERROR,
         )
 
     def __repr__(self) -> str:
vellum/workflows/references/state_value.py

@@ -1,7 +1,7 @@
 from typing import TYPE_CHECKING, TypeVar, cast
 
 from vellum.workflows.descriptors.base import BaseDescriptor
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 
 if TYPE_CHECKING:
@@ -20,4 +20,4 @@ class StateValueReference(BaseDescriptor[_T]):
         if state.meta.parent:
             return self.resolve(state.meta.parent)
 
-        raise NodeException(f"Missing required Workflow state: {self._name}", code=VellumErrorCode.INVALID_STATE)
+        raise NodeException(f"Missing required Workflow state: {self._name}", code=WorkflowErrorCode.INVALID_STATE)
vellum/workflows/references/workflow_input.py

@@ -1,7 +1,7 @@
 from typing import TYPE_CHECKING, Generic, Optional, Tuple, Type, TypeVar, cast
 
 from vellum.workflows.descriptors.base import BaseDescriptor
-from vellum.workflows.errors.types import VellumErrorCode
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 
 if TYPE_CHECKING:
@@ -35,7 +35,7 @@ class WorkflowInputReference(BaseDescriptor[_InputType], Generic[_InputType]):
         if state.meta.parent:
             return self.resolve(state.meta.parent)
 
-        raise NodeException(f"Missing required Workflow input: {self._name}", code=VellumErrorCode.INVALID_INPUTS)
+        raise NodeException(f"Missing required Workflow input: {self._name}", code=WorkflowErrorCode.INVALID_INPUTS)
 
     def __repr__(self) -> str:
         return f"{self._inputs_class.__qualname__}.{self.name}"