vellum-ai 0.10.3__py3-none-any.whl → 0.10.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. vellum/client/core/client_wrapper.py +1 -1
  2. vellum/workflows/events/tests/test_event.py +30 -0
  3. vellum/workflows/events/types.py +57 -3
  4. vellum/workflows/nodes/__init__.py +6 -7
  5. vellum/workflows/nodes/bases/base.py +0 -1
  6. vellum/workflows/nodes/core/inline_subworkflow_node/node.py +2 -1
  7. vellum/workflows/nodes/core/map_node/node.py +1 -1
  8. vellum/workflows/nodes/core/retry_node/node.py +1 -0
  9. vellum/workflows/nodes/core/templating_node/node.py +5 -1
  10. vellum/workflows/nodes/core/try_node/node.py +66 -27
  11. vellum/workflows/nodes/core/try_node/tests/test_node.py +39 -8
  12. vellum/workflows/nodes/displayable/__init__.py +2 -0
  13. vellum/workflows/nodes/displayable/bases/api_node/node.py +3 -3
  14. vellum/workflows/nodes/displayable/code_execution_node/node.py +5 -2
  15. vellum/workflows/nodes/displayable/final_output_node/node.py +6 -2
  16. vellum/workflows/nodes/displayable/note_node/__init__.py +5 -0
  17. vellum/workflows/nodes/displayable/note_node/node.py +10 -0
  18. vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py +10 -11
  19. vellum/workflows/nodes/utils.py +2 -0
  20. vellum/workflows/outputs/base.py +26 -2
  21. vellum/workflows/runner/runner.py +41 -27
  22. vellum/workflows/state/tests/test_state.py +2 -0
  23. vellum/workflows/types/tests/test_utils.py +9 -0
  24. vellum/workflows/types/utils.py +1 -1
  25. vellum/workflows/utils/vellum_variables.py +13 -1
  26. vellum/workflows/workflows/base.py +24 -1
  27. {vellum_ai-0.10.3.dist-info → vellum_ai-0.10.6.dist-info}/METADATA +8 -6
  28. {vellum_ai-0.10.3.dist-info → vellum_ai-0.10.6.dist-info}/RECORD +61 -56
  29. vellum_cli/CONTRIBUTING.md +66 -0
  30. vellum_cli/README.md +3 -0
  31. vellum_ee/workflows/display/base.py +2 -1
  32. vellum_ee/workflows/display/nodes/base_node_display.py +27 -4
  33. vellum_ee/workflows/display/nodes/vellum/__init__.py +2 -0
  34. vellum_ee/workflows/display/nodes/vellum/api_node.py +3 -3
  35. vellum_ee/workflows/display/nodes/vellum/code_execution_node.py +4 -4
  36. vellum_ee/workflows/display/nodes/vellum/conditional_node.py +86 -41
  37. vellum_ee/workflows/display/nodes/vellum/guardrail_node.py +3 -3
  38. vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +4 -5
  39. vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py +9 -9
  40. vellum_ee/workflows/display/nodes/vellum/map_node.py +5 -5
  41. vellum_ee/workflows/display/nodes/vellum/note_node.py +32 -0
  42. vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py +5 -5
  43. vellum_ee/workflows/display/nodes/vellum/search_node.py +6 -10
  44. vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py +2 -2
  45. vellum_ee/workflows/display/nodes/vellum/templating_node.py +4 -5
  46. vellum_ee/workflows/display/nodes/vellum/try_node.py +16 -4
  47. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_code_execution_node_serialization.py +7 -3
  48. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_node_serialization.py +127 -101
  49. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_guardrail_node_serialization.py +6 -5
  50. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_inline_subworkflow_serialization.py +77 -64
  51. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_map_node_serialization.py +4 -3
  52. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_prompt_deployment_serialization.py +6 -6
  53. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_subworkflow_deployment_serialization.py +6 -6
  54. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_terminal_node_serialization.py +4 -3
  55. vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_node_serialization.py +7 -6
  56. vellum_ee/workflows/display/workflows/base_workflow_display.py +14 -9
  57. vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py +2 -7
  58. vellum_ee/workflows/display/workflows/vellum_workflow_display.py +18 -16
  59. {vellum_ai-0.10.3.dist-info → vellum_ai-0.10.6.dist-info}/LICENSE +0 -0
  60. {vellum_ai-0.10.3.dist-info → vellum_ai-0.10.6.dist-info}/WHEEL +0 -0
  61. {vellum_ai-0.10.3.dist-info → vellum_ai-0.10.6.dist-info}/entry_points.txt +0 -0
@@ -17,7 +17,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.10.3",
+            "X-Fern-SDK-Version": "0.10.6",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
@@ -7,6 +7,7 @@ from deepdiff import DeepDiff

 from vellum.workflows.errors.types import VellumError, VellumErrorCode
 from vellum.workflows.events.node import NodeExecutionInitiatedBody, NodeExecutionInitiatedEvent
+from vellum.workflows.events.types import NodeParentContext, WorkflowParentContext
 from vellum.workflows.events.workflow import (
     WorkflowExecutionFulfilledBody,
     WorkflowExecutionFulfilledEvent,
@@ -73,6 +74,7 @@ module_root = name_parts[: name_parts.index("events")]
                     "foo": "bar",
                 },
             },
+            "parent": None,
         },
     ),
     (
@@ -87,6 +89,14 @@ module_root = name_parts[: name_parts.index("events")]
                     MockNode.node_foo: "bar",
                 },
             ),
+            parent=NodeParentContext(
+                node_definition=MockNode,
+                span_id=UUID("123e4567-e89b-12d3-a456-426614174000"),
+                parent=WorkflowParentContext(
+                    workflow_definition=MockWorkflow,
+                    span_id=UUID("123e4567-e89b-12d3-a456-426614174000")
+                )
+            )
         ),
         {
             "id": "123e4567-e89b-12d3-a456-426614174000",
@@ -104,6 +114,23 @@ module_root = name_parts[: name_parts.index("events")]
                     "node_foo": "bar",
                 },
             },
+            "parent": {
+                "node_definition": {
+                    "name": "MockNode",
+                    "module": module_root + ["events", "tests", "test_event"],
+                },
+                "parent": {
+                    "workflow_definition": {
+                        "name": "MockWorkflow",
+                        "module": module_root + ["events", "tests", "test_event"],
+                    },
+                    "type": "WORKFLOW",
+                    "parent": None,
+                    "span_id": "123e4567-e89b-12d3-a456-426614174000"
+                },
+                "type": "WORKFLOW_NODE",
+                "span_id": "123e4567-e89b-12d3-a456-426614174000"
+            },
         },
     ),
     (
@@ -137,6 +164,7 @@ module_root = name_parts[: name_parts.index("events")]
                     "value": "foo",
                 },
             },
+            "parent": None
         },
     ),
     (
@@ -168,6 +196,7 @@ module_root = name_parts[: name_parts.index("events")]
                     "example": "foo",
                 },
             },
+            "parent": None,
         },
     ),
     (
@@ -201,6 +230,7 @@ module_root = name_parts[: name_parts.index("events")]
                     "code": "USER_DEFINED_ERROR",
                 },
             },
+            "parent": None,
         },
     ),
 ],
@@ -2,15 +2,18 @@ from datetime import datetime
 from enum import Enum
 import json
 from uuid import UUID, uuid4
-from typing import Any, Dict, Literal
+from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Type, Union

-from pydantic import Field
+from pydantic import Field, field_serializer

 from vellum.core.pydantic_utilities import UniversalBaseModel
-
 from vellum.workflows.state.encoder import DefaultStateEncoder
 from vellum.workflows.types.utils import datetime_now

+if TYPE_CHECKING:
+    from vellum.workflows.nodes.bases.base import BaseNode
+    from vellum.workflows.workflows.base import BaseWorkflow
+

 class WorkflowEventType(Enum):
     NODE = "NODE"
@@ -44,9 +47,60 @@ def default_serializer(obj: Any) -> Any:
     )


+class BaseParentContext(UniversalBaseModel):
+    span_id: UUID
+    parent: Optional['ParentContext'] = None
+
+
+class BaseDeploymentParentContext(BaseParentContext):
+    deployment_id: UUID
+    deployment_name: str
+    deployment_history_item_id: UUID
+    release_tag_id: UUID
+    release_tag_name: str
+    external_id: Optional[str]
+
+
+class WorkflowDeploymentParentContext(BaseDeploymentParentContext):
+    type: Literal["WORKFLOW_RELEASE_TAG"] = "WORKFLOW_RELEASE_TAG"
+    workflow_version_id: UUID
+
+
+class PromptDeploymentParentContext(BaseDeploymentParentContext):
+    type: Literal["PROMPT_RELEASE_TAG"] = "PROMPT_RELEASE_TAG"
+    prompt_version_id: UUID
+
+
+class NodeParentContext(BaseParentContext):
+    type: Literal["WORKFLOW_NODE"] = "WORKFLOW_NODE"
+    node_definition: Type['BaseNode']
+
+    @field_serializer("node_definition")
+    def serialize_node_definition(self, definition: Type, _info: Any) -> Dict[str, Any]:
+        return serialize_type_encoder(definition)
+
+
+class WorkflowParentContext(BaseParentContext):
+    type: Literal["WORKFLOW"] = "WORKFLOW"
+    workflow_definition: Type['BaseWorkflow']
+
+    @field_serializer("workflow_definition")
+    def serialize_workflow_definition(self, definition: Type, _info: Any) -> Dict[str, Any]:
+        return serialize_type_encoder(definition)
+
+
+ParentContext = Union[
+    NodeParentContext,
+    WorkflowParentContext,
+    PromptDeploymentParentContext,
+    WorkflowDeploymentParentContext,
+]
+
+
 class BaseEvent(UniversalBaseModel):
     id: UUID = Field(default_factory=uuid4)
     timestamp: datetime = Field(default_factory=default_datetime_factory)
     api_version: Literal["2024-10-25"] = "2024-10-25"
     trace_id: UUID
     span_id: UUID
+    parent: Optional['ParentContext'] = None
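
The new models above give every event an optional, recursive `parent` chain: a node execution can point at the workflow run that spawned it, which can in turn point at a deployment release tag, and so on. A minimal sketch of how the chain is built and serialized, assuming `MyNode` and `MyWorkflow` are ordinary `BaseNode`/`BaseWorkflow` subclasses defined elsewhere (hypothetical names, not part of this diff):

from uuid import uuid4

from vellum.workflows.events.types import NodeParentContext, WorkflowParentContext

# A node execution whose parent is the workflow run that invoked it.
parent = NodeParentContext(
    node_definition=MyNode,              # hypothetical BaseNode subclass
    span_id=uuid4(),
    parent=WorkflowParentContext(
        workflow_definition=MyWorkflow,  # hypothetical BaseWorkflow subclass
        span_id=uuid4(),
    ),
)

# The field_serializer hooks reduce the class references to
# {"name": ..., "module": [...]} dictionaries on dump, matching the
# expected JSON in the updated test_event.py above (pydantic v2 API assumed).
print(parent.model_dump(mode="json"))
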
@@ -1,5 +1,5 @@
 from vellum.workflows.nodes.bases import BaseNode
-from vellum.workflows.nodes.core import (ErrorNode, InlineSubworkflowNode, MapNode, RetryNode, TemplatingNode, TryNode,)
+from vellum.workflows.nodes.core import ErrorNode, InlineSubworkflowNode, MapNode, RetryNode, TemplatingNode, TryNode
 from vellum.workflows.nodes.displayable import (
     APINode,
     CodeExecutionNode,
@@ -7,6 +7,7 @@ from vellum.workflows.nodes.displayable import (
     FinalOutputNode,
     GuardrailNode,
     InlinePromptNode,
+    NoteNode,
     PromptDeploymentNode,
     SearchNode,
     SubworkflowDeploymentNode,
@@ -28,20 +29,18 @@ __all__ = [
     "TemplatingNode",
     "TryNode",
     # Displayable Base Nodes
-    "BaseSearchNode",
     "BaseInlinePromptNode",
     "BasePromptDeploymentNode",
+    "BaseSearchNode",
     # Displayable Nodes
     "APINode",
     "CodeExecutionNode",
+    "ConditionalNode",
+    "FinalOutputNode",
     "GuardrailNode",
     "InlinePromptNode",
+    "NoteNode",
     "PromptDeploymentNode",
     "SearchNode",
-    "ConditionalNode",
-    "GuardrailNode",
     "SubworkflowDeploymentNode",
-    "FinalOutputNode",
-    "PromptDeploymentNode",
-    "SearchNode",
 ]
@@ -215,7 +215,6 @@ class BaseNode(Generic[StateType], metaclass=BaseNodeMeta):
     # https://app.shortcut.com/vellum/story/4008/auto-inherit-basenodeoutputs-in-outputs-classes
     class Outputs(BaseOutputs):
         _node_class: Optional[Type["BaseNode"]] = None
-        pass

     class Ports(NodePorts):
         default = Port(default=True)
@@ -25,6 +25,7 @@ class InlineSubworkflowNode(BaseSubworkflowNode[StateType], Generic[StateType, W
     def run(self) -> Iterator[BaseOutput]:
         subworkflow = self.subworkflow(
             parent_state=self.state,
+            context=self._context,
         )
         subworkflow_stream = subworkflow.stream(
             inputs=self._compile_subworkflow_inputs(),
@@ -56,7 +57,7 @@ class InlineSubworkflowNode(BaseSubworkflowNode[StateType], Generic[StateType, W
         if outputs is None:
             raise NodeException(
                 message="Expected to receive outputs from Workflow Deployment",
-                code=VellumErrorCode.INTERNAL_ERROR,
+                code=VellumErrorCode.INVALID_OUTPUTS,
             )

         # For any outputs somehow in our final fulfilled outputs array,
@@ -89,7 +89,7 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
         return self.Outputs(**mapped_items)

     def _run_subworkflow(self, *, item: MapNodeItemType, index: int) -> None:
-        subworkflow = self.subworkflow(parent_state=self.state)
+        subworkflow = self.subworkflow(parent_state=self.state, context=self._context)
         events = subworkflow.stream(inputs=self.SubworkflowInputs(index=index, item=item, all_items=self.items))

         for event in events:
@@ -43,6 +43,7 @@ class RetryNode(BaseNode[StateType], Generic[StateType], metaclass=_RetryNodeMet
             attempt_number = index + 1
             subworkflow = self.subworkflow(
                 parent_state=self.state,
+                context=self._context,
             )
             terminal_event = subworkflow.run(
                 inputs=self.SubworkflowInputs(attempt_number=attempt_number),
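
The InlineSubworkflowNode, MapNode, and RetryNode hunks above all make the same change: the wrapper node now passes its own `self._context` when instantiating its subworkflow, so nested runs share the parent's `WorkflowContext` (and with it the configured Vellum client) rather than building their own. In practice a client supplied at the top level reaches every nested subworkflow; a rough sketch, where `MyWorkflow` is a hypothetical `BaseWorkflow` subclass whose graph contains one of these wrapper nodes:

from vellum.client import Vellum
from vellum.workflows.state.context import WorkflowContext

# Configure the client once on the outer workflow; the wrapper nodes now
# forward this context into the subworkflows they spawn.
workflow = MyWorkflow(  # hypothetical workflow, not defined in this diff
    context=WorkflowContext(
        _vellum_client=Vellum(api_key="my-api-key"),
    ),
)
terminal_event = workflow.run()
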
@@ -49,7 +49,11 @@ class _TemplatingNodeMeta(BaseNodeMeta):
         if not isinstance(parent, _TemplatingNodeMeta):
             raise ValueError("TemplatingNode must be created with the TemplatingNodeMeta metaclass")

-        parent.__dict__["Outputs"].__annotations__["result"] = parent.get_output_type()
+        annotations = parent.__dict__["Outputs"].__annotations__
+        parent.__dict__["Outputs"].__annotations__ = {
+            **annotations,
+            "result": parent.get_output_type(),
+        }
         return parent

     def get_output_type(cls) -> Type:
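
The metaclass previously mutated the `__annotations__` dict it found on the node's `Outputs` class in place, presumably risking the classic annotation-sharing gotcha: attribute lookup can hand back a dict owned by a base class, so an in-place write can leak the `result` annotation into unrelated classes. Building a fresh dict and reassigning it keeps the override scoped to the class being constructed (the same fix appears below for `CodeExecutionNode` and `FinalOutputNode`). A small, self-contained illustration of the underlying Python behaviour:

class BaseOutputs:
    result: str


class ChildOutputs(BaseOutputs):
    # Declares no annotations of its own.
    pass


# On Python 3.9 and older, ChildOutputs.__annotations__ resolves to the dict
# owned by BaseOutputs, so an in-place update would rewrite the base as well:
#     ChildOutputs.__annotations__["result"] = int   # also mutates BaseOutputs
#
# Reassigning a brand-new dict, as the diff now does, only touches the child:
ChildOutputs.__annotations__ = {**ChildOutputs.__annotations__, "result": int}

print(BaseOutputs.__annotations__)   # {'result': <class 'str'>} -- unchanged
print(ChildOutputs.__annotations__)  # includes {'result': <class 'int'>}
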
@@ -1,10 +1,13 @@
-from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, Tuple, Type, TypeVar
+import sys
+from types import ModuleType
+from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Iterator, Optional, Set, Tuple, Type, TypeVar, cast

 from vellum.workflows.errors.types import VellumError, VellumErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.bases.base import BaseNodeMeta
-from vellum.workflows.outputs.base import BaseOutputs
+from vellum.workflows.nodes.utils import ADORNMENT_MODULE_NAME
+from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
 from vellum.workflows.types.generics import StateType

 if TYPE_CHECKING:
@@ -56,33 +59,60 @@ class TryNode(BaseNode[StateType], Generic[StateType], metaclass=_TryNodeMeta):
     class Outputs(BaseNode.Outputs):
         error: Optional[VellumError] = None

-    def run(self) -> Outputs:
+    def run(self) -> Iterator[BaseOutput]:
         subworkflow = self.subworkflow(
             parent_state=self.state,
+            context=self._context,
         )
-        terminal_event = subworkflow.run()
-
-        if terminal_event.name == "workflow.execution.fulfilled":
-            outputs = self.Outputs()
-            for descriptor, value in terminal_event.outputs:
-                setattr(outputs, descriptor.name, value)
-            return outputs
-        elif terminal_event.name == "workflow.execution.paused":
+        subworkflow_stream = subworkflow.stream()
+
+        outputs: Optional[BaseOutputs] = None
+        exception: Optional[NodeException] = None
+        fulfilled_output_names: Set[str] = set()
+
+        for event in subworkflow_stream:
+            if exception:
+                continue
+
+            if event.name == "workflow.execution.streaming":
+                if event.output.is_fulfilled:
+                    fulfilled_output_names.add(event.output.name)
+                yield event.output
+            elif event.name == "workflow.execution.fulfilled":
+                outputs = event.outputs
+            elif event.name == "workflow.execution.paused":
+                exception = NodeException(
+                    code=VellumErrorCode.INVALID_OUTPUTS,
+                    message="Subworkflow unexpectedly paused within Try Node",
+                )
+            elif event.name == "workflow.execution.rejected":
+                if self.on_error_code and self.on_error_code != event.error.code:
+                    exception = NodeException(
+                        code=VellumErrorCode.INVALID_OUTPUTS,
+                        message=f"""Unexpected rejection: {event.error.code.value}.
+Message: {event.error.message}""",
+                    )
+                else:
+                    outputs = self.Outputs(error=event.error)
+
+        if exception:
+            raise exception
+
+        if outputs is None:
             raise NodeException(
                 code=VellumErrorCode.INVALID_OUTPUTS,
-                message="Subworkflow unexpectedly paused within Try Node",
-            )
-        elif self.on_error_code and self.on_error_code != terminal_event.error.code:
-            raise NodeException(
-                code=VellumErrorCode.INVALID_OUTPUTS,
-                message=f"""Unexpected rejection: {terminal_event.error.code.value}.
-Message: {terminal_event.error.message}""",
-            )
-        else:
-            return self.Outputs(
-                error=terminal_event.error,
+                message="Expected to receive outputs from Try Node's subworkflow",
             )

+        # For any outputs somehow in our final fulfilled outputs array,
+        # but not fulfilled by the stream.
+        for descriptor, value in outputs:
+            if descriptor.name not in fulfilled_output_names:
+                yield BaseOutput(
+                    name=descriptor.name,
+                    value=value,
+                )
+
     @classmethod
     def wrap(cls, on_error_code: Optional[VellumErrorCode] = None) -> Callable[..., Type["TryNode"]]:
         _on_error_code = on_error_code
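
`TryNode.run` is now a generator: it streams the wrapped subworkflow, yields each fulfilled `BaseOutput` as it arrives, and surfaces a caught rejection as an `error` output instead of returning an `Outputs` instance. Callers therefore have to drain the iterator, as the updated tests further down do. A sketch of defining and consuming a wrapped node in this style (the names here are illustrative, not taken from the diff):

from vellum.workflows.errors.types import VellumErrorCode
from vellum.workflows.nodes.bases import BaseNode
from vellum.workflows.nodes.core.try_node.node import TryNode
from vellum.workflows.state.base import BaseState


@TryNode.wrap(on_error_code=VellumErrorCode.PROVIDER_ERROR)
class MyNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        value: str

    def run(self) -> Outputs:
        return self.Outputs(value="hello")


node = MyNode(state=BaseState())
# run() now yields BaseOutput objects; drain the generator to collect them.
results = {output.name: output.value for output in node.run()}
print(results)  # {'value': 'hello'} on success; an "error" entry appears
                # instead if a PROVIDER_ERROR was raised and caught
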
@@ -100,11 +130,20 @@ Message: {terminal_event.error.message}""",
                 class Outputs(inner_cls.Outputs):  # type: ignore[name-defined]
                     pass

-            class WrappedNode(TryNode[StateType]):
-                on_error_code = _on_error_code
-
-                subworkflow = Subworkflow
-
+            dynamic_module = f"{inner_cls.__module__}.{inner_cls.__name__}.{ADORNMENT_MODULE_NAME}"
+            # This dynamic module allows calls to `type_hints` to work
+            sys.modules[dynamic_module] = ModuleType(dynamic_module)
+
+            # We use a dynamic wrapped node class to be uniquely tied to this `inner_cls` node during serialization
+            WrappedNode = type(
+                cls.__name__,
+                (TryNode,),
+                {
+                    "__module__": dynamic_module,
+                    "on_error_code": _on_error_code,
+                    "subworkflow": Subworkflow,
+                },
+            )
             return WrappedNode

         return decorator
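
`TryNode.wrap` no longer declares a nested `WrappedNode` class; it builds the class dynamically with `type()`, names it after the adornment (`cls.__name__`, i.e. `TryNode`), and parks it under a synthetic module `<inner module>.<inner name>.<adornment>` registered in `sys.modules`, so that `type_hints`-style lookups resolve and the serializer can tie the wrapper back to the node it adorns. A quick introspection sketch (the `MyNode` name is illustrative):

import sys

from vellum.workflows.nodes.bases import BaseNode
from vellum.workflows.nodes.core.try_node.node import TryNode


@TryNode.wrap()
class MyNode(BaseNode):
    pass


# The decorated name is now a dynamically created TryNode subclass...
print(issubclass(MyNode, TryNode))       # True
print(MyNode.__name__)                   # "TryNode" (taken from cls.__name__)
# ...whose synthetic module ends in ADORNMENT_MODULE_NAME and is registered:
print(MyNode.__module__)                 # e.g. "__main__.MyNode.<adornment>"
print(MyNode.__module__ in sys.modules)  # True
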
@@ -1,12 +1,15 @@
 import pytest

+from vellum.client import Vellum
 from vellum.workflows.errors.types import VellumError, VellumErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.core.try_node.node import TryNode
 from vellum.workflows.outputs import BaseOutputs
+from vellum.workflows.outputs.base import BaseOutput
 from vellum.workflows.state.base import BaseState, StateMeta
+from vellum.workflows.state.context import WorkflowContext


 def test_try_node__on_error_code__successfully_caught():
@@ -21,11 +24,15 @@ def test_try_node__on_error_code__successfully_caught():

     # WHEN the node is run and throws a PROVIDER_ERROR
     node = TestNode(state=BaseState())
-    outputs = node.run()
-
-    # THEN the exception is retried
-    assert outputs == {
-        "error": VellumError(message="This will be caught", code=VellumErrorCode.PROVIDER_ERROR),
+    outputs = [o for o in node.run()]
+
+    # THEN the exception is caught and returned
+    assert len(outputs) == 2
+    assert set(outputs) == {
+        BaseOutput(name="value"),
+        BaseOutput(
+            name="error", value=VellumError(message="This will be caught", code=VellumErrorCode.PROVIDER_ERROR)
+        ),
     }


@@ -42,7 +49,7 @@ def test_try_node__retry_on_error_code__missed():
     # WHEN the node is run and throws a different exception
     node = TestNode(state=BaseState())
     with pytest.raises(NodeException) as exc_info:
-        node.run()
+        list(node.run())

     # THEN the exception is not caught
     assert exc_info.value.message == "Unexpected rejection: INTERNAL_ERROR.\nMessage: This will be missed"
@@ -76,7 +83,31 @@ def test_try_node__use_parent_inputs_and_state():
             meta=StateMeta(workflow_inputs=Inputs(foo="foo")),
         ),
     )
-    outputs = node.run()
+    outputs = list(node.run())

     # THEN the data is used successfully
-    assert outputs == {"value": "foo bar"}
+    assert len(outputs) == 1
+    assert outputs[-1] == BaseOutput(name="value", value="foo bar")
+
+
+def test_try_node__use_parent_execution_context():
+    # GIVEN a try node that uses node context to use the vellum client
+    @TryNode.wrap()
+    class TestNode(BaseNode):
+        class Outputs(BaseOutputs):
+            key: str
+
+        def run(self) -> Outputs:
+            return self.Outputs(key=self._context.vellum_client.ad_hoc._client_wrapper.api_key)
+
+    # WHEN the node is run with a custom vellum client
+    node = TestNode(
+        context=WorkflowContext(
+            _vellum_client=Vellum(api_key="test-key"),
+        )
+    )
+    outputs = list(node.run())
+
+    # THEN the inner node had access to the key
+    assert len(outputs) == 1
+    assert outputs[-1] == BaseOutput(name="key", value="test-key")
@@ -9,6 +9,7 @@ from .final_output_node import FinalOutputNode
 from .guardrail_node import GuardrailNode
 from .inline_prompt_node import InlinePromptNode
 from .merge_node import MergeNode
+from .note_node import NoteNode
 from .prompt_deployment_node import PromptDeploymentNode
 from .search_node import SearchNode
 from .subworkflow_deployment_node import SubworkflowDeploymentNode
@@ -23,6 +24,7 @@ __all__ = [
     "GuardrailNode",
     "MapNode",
     "MergeNode",
+    "NoteNode",
     "SubworkflowDeploymentNode",
     "PromptDeploymentNode",
     "SearchNode",
@@ -8,7 +8,7 @@ from vellum.workflows.errors.types import VellumErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.outputs import BaseOutputs
-from vellum.workflows.types.core import JsonObject, VellumSecret
+from vellum.workflows.types.core import Json, JsonObject, VellumSecret
 from vellum.workflows.types.generics import StateType


@@ -26,11 +26,11 @@ class BaseAPINode(BaseNode, Generic[StateType]):
     url: str
     method: APIRequestMethod
     data: Optional[str] = None
-    json: Optional["JsonObject"] = None
+    json: Optional["Json"] = None
     headers: Optional[Dict[str, Union[str, VellumSecret]]] = None

     class Outputs(BaseOutputs):
-        json: Optional["JsonObject"]
+        json: Optional["Json"]
         headers: Dict[str, str]
         status_code: int
         text: str
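
Widening the request and output `json` fields from `JsonObject` to `Json` lets an API body be any JSON value, for example a top-level list or a bare string, rather than only a JSON object. The real aliases live in `vellum.workflows.types.core`; the definitions below are an assumed sketch of the distinction, not copied from the package:

from typing import Dict, List, Union

# Assumed shape of the aliases (illustrative only):
Json = Union[None, bool, int, float, str, List["Json"], Dict[str, "Json"]]
JsonObject = Dict[str, "Json"]

# Under Optional["JsonObject"] only a dict was a valid `json` value;
# Optional["Json"] also admits payloads like these:
example_bodies: List[Json] = [
    {"key": "value"},       # still valid
    ["a", "list", "body"],  # now expressible
    "a bare string",        # now expressible
]
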
@@ -19,7 +19,6 @@ from vellum (
     VellumValue,
 )
 from vellum.core import RequestOptions
-
 from vellum.workflows.errors.types import VellumErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
@@ -44,7 +43,11 @@ class _CodeExecutionNodeMeta(BaseNodeMeta):
         if not isinstance(parent, _CodeExecutionNodeMeta):
             raise ValueError("CodeExecutionNode must be created with the CodeExecutionNodeMeta metaclass")

-        parent.__dict__["Outputs"].__annotations__["result"] = parent.get_output_type()
+        annotations = parent.__dict__["Outputs"].__annotations__
+        parent.__dict__["Outputs"].__annotations__ = {
+            **annotations,
+            "result": parent.get_output_type(),
+        }
         return parent

     def get_output_type(cls) -> Type:
@@ -16,9 +16,13 @@ class _FinalOutputNodeMeta(BaseNodeMeta):

         # We use the compiled class to infer the output type for the Outputs.value descriptor.
         if not isinstance(parent, _FinalOutputNodeMeta):
-            raise ValueError("CodeExecutionNode must be created with the CodeExecutionNodeMeta metaclass")
+            raise ValueError("FinalOutputNode must be created with the FinalOutputNodeMeta metaclass")

-        parent.__dict__["Outputs"].__annotations__["value"] = parent.get_output_type()
+        annotations = parent.__dict__["Outputs"].__annotations__
+        parent.__dict__["Outputs"].__annotations__ = {
+            **annotations,
+            "value": parent.get_output_type(),
+        }
         return parent

     def get_output_type(cls) -> Type:
@@ -0,0 +1,5 @@
+from .node import NoteNode
+
+__all__ = [
+    "NoteNode",
+]
@@ -0,0 +1,10 @@
+from vellum.workflows.nodes.bases import BaseNode
+
+
+class NoteNode(BaseNode):
+    """
+    A no-op Node purely used to display a note in the Vellum UI.
+    """
+
+    def run(self) -> BaseNode.Outputs:
+        raise RuntimeError("NoteNode should never be run")
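
`NoteNode` is a new display-only node: it lets a workflow module carry an annotation that the Vellum UI renders, and executing it is an explicit error. How a note's content is configured is handled by the new `vellum_ee` display class for it (not visible in this section); the sketch below only demonstrates the runtime contract:

from vellum.workflows.nodes.displayable import NoteNode
from vellum.workflows.state.base import BaseState


class TodoNote(NoteNode):
    """Swap this prompt for the production deployment before release."""


# NoteNode subclasses are for display only; running one raises immediately.
try:
    TodoNote(state=BaseState()).run()
except RuntimeError as exc:
    print(exc)  # "NoteNode should never be run"
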
@@ -11,13 +11,12 @@ from vellum (
     StringVellumValue,
     VellumError,
 )
-
-from vellum.workflows.constants import UNDEF
-from vellum.workflows.errors import VellumError as WacVellumError
+from vellum.workflows.errors import VellumError as SdkVellumError
 from vellum.workflows.errors.types import VellumErrorCode
 from vellum.workflows.inputs import BaseInputs
 from vellum.workflows.nodes import InlinePromptNode
 from vellum.workflows.nodes.core.try_node.node import TryNode
+from vellum.workflows.outputs.base import BaseOutput
 from vellum.workflows.state import BaseState
 from vellum.workflows.state.base import StateMeta

@@ -136,13 +135,13 @@ def test_inline_text_prompt_node__catch_provider_error(vellum_adhoc_prompt_clien
             meta=StateMeta(workflow_inputs=Inputs(input="Say something.")),
         )
     )
-    outputs = node.run()
+    outputs = list(node.run())

     # THEN the node should have produced the outputs we expect
-    # We need mypy support for annotations to remove these type ignores
-    # https://app.shortcut.com/vellum/story/4890
-    assert outputs.error == WacVellumError(  # type: ignore[attr-defined]
-        message="OpenAI failed",
-        code=VellumErrorCode.PROVIDER_ERROR,
-    )
-    assert outputs.text is UNDEF  # type: ignore[attr-defined]
+    assert BaseOutput(
+        name="error",
+        value=SdkVellumError(
+            message="OpenAI failed",
+            code=VellumErrorCode.PROVIDER_ERROR,
+        ),
+    ) in outputs
@@ -5,6 +5,8 @@ from vellum.workflows.nodes import BaseNode
 from vellum.workflows.references import NodeReference
 from vellum.workflows.types.generics import NodeType

+ADORNMENT_MODULE_NAME = "<adornment>"
+

 @cache
 def get_wrapped_node(node: Type[NodeType]) -> Type[BaseNode]: