vellum-ai 0.14.16__py3-none-any.whl → 0.14.17__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (29)
  1. vellum/__init__.py +2 -0
  2. vellum/client/core/client_wrapper.py +1 -1
  3. vellum/client/types/__init__.py +2 -0
  4. vellum/client/types/release.py +21 -0
  5. vellum/client/types/workflow_release_tag_read.py +7 -1
  6. vellum/prompts/blocks/compilation.py +14 -0
  7. vellum/types/release.py +3 -0
  8. vellum/workflows/nodes/bases/base.py +7 -7
  9. vellum/workflows/nodes/bases/base_adornment_node.py +2 -0
  10. vellum/workflows/nodes/core/retry_node/node.py +1 -1
  11. vellum/workflows/nodes/core/try_node/node.py +1 -1
  12. vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +4 -0
  13. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +27 -1
  14. vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/__init__.py +0 -0
  15. vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py +182 -0
  16. vellum/workflows/nodes/displayable/inline_prompt_node/node.py +4 -1
  17. vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py +7 -1
  18. vellum/workflows/utils/tests/test_vellum_variables.py +7 -1
  19. vellum/workflows/utils/vellum_variables.py +4 -0
  20. {vellum_ai-0.14.16.dist-info → vellum_ai-0.14.17.dist-info}/METADATA +1 -1
  21. {vellum_ai-0.14.16.dist-info → vellum_ai-0.14.17.dist-info}/RECORD +29 -25
  22. vellum_ee/workflows/display/nodes/base_node_display.py +35 -29
  23. vellum_ee/workflows/display/nodes/get_node_display_class.py +0 -9
  24. vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py +38 -18
  25. vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +1 -0
  26. vellum_ee/workflows/display/workflows/tests/test_workflow_display.py +51 -7
  27. {vellum_ai-0.14.16.dist-info → vellum_ai-0.14.17.dist-info}/LICENSE +0 -0
  28. {vellum_ai-0.14.16.dist-info → vellum_ai-0.14.17.dist-info}/WHEEL +0 -0
  29. {vellum_ai-0.14.16.dist-info → vellum_ai-0.14.17.dist-info}/entry_points.txt +0 -0
vellum/__init__.py CHANGED
@@ -296,6 +296,7 @@ from .types import (
  RejectedExecuteWorkflowWorkflowResultEvent,
  RejectedPromptExecutionMeta,
  RejectedWorkflowNodeResultEvent,
+ Release,
  ReleaseTagSource,
  ReplaceTestSuiteTestCaseRequest,
  RichTextChildBlock,
@@ -844,6 +845,7 @@ __all__ = [
  "RejectedExecuteWorkflowWorkflowResultEvent",
  "RejectedPromptExecutionMeta",
  "RejectedWorkflowNodeResultEvent",
+ "Release",
  "ReleaseTagSource",
  "ReplaceTestSuiteTestCaseRequest",
  "RichTextChildBlock",
vellum/client/core/client_wrapper.py CHANGED
@@ -18,7 +18,7 @@ class BaseClientWrapper:
  headers: typing.Dict[str, str] = {
  "X-Fern-Language": "Python",
  "X-Fern-SDK-Name": "vellum-ai",
- "X-Fern-SDK-Version": "0.14.16",
+ "X-Fern-SDK-Version": "0.14.17",
  }
  headers["X_API_KEY"] = self.api_key
  return headers
vellum/client/types/__init__.py CHANGED
@@ -304,6 +304,7 @@ from .rejected_execute_prompt_response import RejectedExecutePromptResponse
  from .rejected_execute_workflow_workflow_result_event import RejectedExecuteWorkflowWorkflowResultEvent
  from .rejected_prompt_execution_meta import RejectedPromptExecutionMeta
  from .rejected_workflow_node_result_event import RejectedWorkflowNodeResultEvent
+ from .release import Release
  from .release_tag_source import ReleaseTagSource
  from .replace_test_suite_test_case_request import ReplaceTestSuiteTestCaseRequest
  from .rich_text_child_block import RichTextChildBlock
@@ -825,6 +826,7 @@ __all__ = [
  "RejectedExecuteWorkflowWorkflowResultEvent",
  "RejectedPromptExecutionMeta",
  "RejectedWorkflowNodeResultEvent",
+ "Release",
  "ReleaseTagSource",
  "ReplaceTestSuiteTestCaseRequest",
  "RichTextChildBlock",
vellum/client/types/release.py ADDED
@@ -0,0 +1,21 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from ..core.pydantic_utilities import UniversalBaseModel
+ import datetime as dt
+ from ..core.pydantic_utilities import IS_PYDANTIC_V2
+ import typing
+ import pydantic
+
+
+ class Release(UniversalBaseModel):
+ id: str
+ timestamp: dt.datetime
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
vellum/client/types/workflow_release_tag_read.py CHANGED
@@ -4,6 +4,7 @@ from ..core.pydantic_utilities import UniversalBaseModel
  import pydantic
  from .release_tag_source import ReleaseTagSource
  from .workflow_release_tag_workflow_deployment_history_item import WorkflowReleaseTagWorkflowDeploymentHistoryItem
+ from .release import Release
  from ..core.pydantic_utilities import IS_PYDANTIC_V2
  import typing

@@ -24,7 +25,12 @@ class WorkflowReleaseTagRead(UniversalBaseModel):

  history_item: WorkflowReleaseTagWorkflowDeploymentHistoryItem = pydantic.Field()
  """
- The Workflow Deployment History Item that this Release Tag is associated with
+ Deprecated. Reference the `release` field instead.
+ """
+
+ release: Release = pydantic.Field()
+ """
+ The Release that this Release Tag points to.
  """

  if IS_PYDANTIC_V2:
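As a usage note (not part of the diff), consumers can migrate from the deprecated `history_item` field to the new `release` field. A minimal sketch, assuming only the fields shown above:

from vellum.client.types.workflow_release_tag_read import WorkflowReleaseTagRead


def describe_release(tag: WorkflowReleaseTagRead) -> str:
    # `release.id` and `release.timestamp` are the fields defined on the new Release model.
    return f"release {tag.release.id} created at {tag.release.timestamp.isoformat()}"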
vellum/prompts/blocks/compilation.py CHANGED
@@ -3,11 +3,13 @@ from typing import Sequence, Union, cast

  from vellum import (
  ChatMessage,
+ DocumentVellumValue,
  JsonVellumValue,
  PromptBlock,
  PromptRequestInput,
  RichTextPromptBlock,
  StringVellumValue,
+ VellumDocument,
  VellumVariable,
  )
  from vellum.client.types.audio_vellum_value import AudioVellumValue
@@ -159,6 +161,18 @@ def compile_prompt_blocks(
  cache_config=block.cache_config,
  )
  compiled_blocks.append(audio_block)
+
+ elif block.block_type == "DOCUMENT":
+ document_block = CompiledValuePromptBlock(
+ content=DocumentVellumValue(
+ value=VellumDocument(
+ src=block.src,
+ metadata=block.metadata,
+ )
+ ),
+ cache_config=block.cache_config,
+ )
+ compiled_blocks.append(document_block)
  else:
  raise PromptCompilationError(f"Unknown block_type: {block.block_type}")

vellum/types/release.py ADDED
@@ -0,0 +1,3 @@
+ # WARNING: This file will be removed in a future release. Please import from "vellum.client" instead.
+
+ from vellum.client.types.release import *
vellum/workflows/nodes/bases/base.py CHANGED
@@ -293,14 +293,14 @@ class BaseNode(Generic[StateType], metaclass=BaseNodeMeta):

  if cls.merge_behavior == MergeBehavior.AWAIT_ALL:
  """
- A node utilizing an AWAIT_ALL merge strategy will only be considered ready for the Nth time
- when all of its dependencies have been executed N times.
+ A node utilizing an AWAIT_ALL merge strategy will only be considered ready
+ when all of its dependencies have invoked this node.
  """
- current_node_execution_count = state.meta.node_execution_cache.get_execution_count(cls.node_class)
- return all(
- state.meta.node_execution_cache.get_execution_count(dep) == current_node_execution_count + 1
- for dep in dependencies
- )
+ # Check if all dependencies have invoked this node
+ dependencies_invoked = state.meta.node_execution_cache._dependencies_invoked.get(node_span_id, set())
+ all_deps_invoked = all(dep in dependencies_invoked for dep in dependencies)
+
+ return all_deps_invoked

  raise NodeException(
  message="Invalid Trigger Node Specification",
vellum/workflows/nodes/bases/base_adornment_node.py CHANGED
@@ -73,3 +73,5 @@ class BaseAdornmentNode(
  # Subclasses of BaseAdornableNode can override this method to provider their own
  # approach to annotating the outputs class based on the `subworkflow.Outputs`
  setattr(outputs_class, reference.name, reference)
+ if cls.__wrapped_node__:
+ cls.__output_ids__[reference.name] = cls.__wrapped_node__.__output_ids__[reference.name]
vellum/workflows/nodes/core/retry_node/node.py CHANGED
@@ -85,7 +85,7 @@ Message: {terminal_event.error.message}""",
  @classmethod
  def wrap(
  cls,
- max_attempts: int,
+ max_attempts: int = 3,
  delay: Optional[float] = None,
  retry_on_error_code: Optional[WorkflowErrorCode] = None,
  retry_on_condition: Optional[BaseDescriptor] = None,
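A small illustration (not from the diff) of what the new default enables: `RetryNode.wrap()` can now be applied without passing `max_attempts`, which defaults to 3. The node below is hypothetical.

from vellum.workflows.nodes.bases.base import BaseNode
from vellum.workflows.nodes.core.retry_node.node import RetryNode


@RetryNode.wrap()  # max_attempts now defaults to 3, so no argument is required
class FlakyNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        result: str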
vellum/workflows/nodes/core/try_node/node.py CHANGED
@@ -102,4 +102,4 @@ Message: {event.error.message}""",
  if reference.name == "error":
  raise ValueError("`error` is a reserved name for TryNode.Outputs")

- setattr(outputs_class, reference.name, reference)
+ super().__annotate_outputs_class__(outputs_class, reference)
vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py CHANGED
@@ -28,6 +28,9 @@ class BasePromptNode(BaseNode, Generic[StateType]):
  def _get_prompt_event_stream(self) -> Union[Iterator[AdHocExecutePromptEvent], Iterator[ExecutePromptEvent]]:
  pass

+ def _validate(self) -> None:
+ pass
+
  def run(self) -> Iterator[BaseOutput]:
  outputs = yield from self._process_prompt_event_stream()
  if outputs is None:
@@ -37,6 +40,7 @@ class BasePromptNode(BaseNode, Generic[StateType]):
  )

  def _process_prompt_event_stream(self) -> Generator[BaseOutput, None, Optional[List[PromptOutput]]]:
+ self._validate()
  try:
  prompt_event_stream = self._get_prompt_event_stream()
  except ApiError as e:
vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py CHANGED
@@ -1,6 +1,6 @@
  import json
  from uuid import uuid4
- from typing import Callable, ClassVar, Generic, Iterator, List, Optional, Tuple, Union
+ from typing import Callable, ClassVar, Generic, Iterator, List, Optional, Set, Tuple, Union

  from vellum import (
  AdHocExecutePromptEvent,
@@ -18,6 +18,7 @@ from vellum import (
  from vellum.client import RequestOptions
  from vellum.client.types.chat_message_request import ChatMessageRequest
  from vellum.client.types.prompt_settings import PromptSettings
+ from vellum.client.types.rich_text_child_block import RichTextChildBlock
  from vellum.workflows.constants import OMIT
  from vellum.workflows.context import get_execution_context
  from vellum.workflows.errors import WorkflowErrorCode
@@ -59,6 +60,31 @@ class BaseInlinePromptNode(BasePromptNode[StateType], Generic[StateType]):
  class Trigger(BasePromptNode.Trigger):
  merge_behavior = MergeBehavior.AWAIT_ANY

+ def _extract_required_input_variables(self, blocks: Union[List[PromptBlock], List[RichTextChildBlock]]) -> Set[str]:
+ required_variables = set()
+
+ for block in blocks:
+ if block.block_type == "VARIABLE":
+ required_variables.add(block.input_variable)
+ elif block.block_type == "CHAT_MESSAGE" and block.blocks:
+ required_variables.update(self._extract_required_input_variables(block.blocks))
+ elif block.block_type == "RICH_TEXT" and block.blocks:
+ required_variables.update(self._extract_required_input_variables(block.blocks))
+
+ return required_variables
+
+ def _validate(self) -> None:
+ required_variables = self._extract_required_input_variables(self.blocks)
+ provided_variables = set(self.prompt_inputs.keys() if self.prompt_inputs else set())
+
+ missing_variables = required_variables - provided_variables
+ if missing_variables:
+ missing_vars_str = ", ".join(f"'{var}'" for var in missing_variables)
+ raise NodeException(
+ message=f"Missing required input variables by VariablePromptBlock: {missing_vars_str}",
+ code=WorkflowErrorCode.INVALID_INPUTS,
+ )
+
  def _get_prompt_event_stream(self) -> Iterator[AdHocExecutePromptEvent]:
  input_variables, input_values = self._compile_prompt_inputs()
  current_context = get_execution_context()
vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py ADDED
@@ -0,0 +1,182 @@
+ import pytest
+ from uuid import uuid4
+ from typing import Any, Iterator, List
+
+ from vellum import (
+ ChatMessagePromptBlock,
+ JinjaPromptBlock,
+ PlainTextPromptBlock,
+ PromptBlock,
+ RichTextPromptBlock,
+ VariablePromptBlock,
+ )
+ from vellum.client.types.execute_prompt_event import ExecutePromptEvent
+ from vellum.client.types.fulfilled_execute_prompt_event import FulfilledExecutePromptEvent
+ from vellum.client.types.initiated_execute_prompt_event import InitiatedExecutePromptEvent
+ from vellum.client.types.prompt_output import PromptOutput
+ from vellum.client.types.prompt_request_string_input import PromptRequestStringInput
+ from vellum.client.types.string_vellum_value import StringVellumValue
+ from vellum.workflows.errors import WorkflowErrorCode
+ from vellum.workflows.exceptions import NodeException
+ from vellum.workflows.nodes.displayable.bases.inline_prompt_node import BaseInlinePromptNode
+
+
+ def test_validation_with_missing_variables():
+ """Test that validation correctly identifies missing variables."""
+ test_blocks: List[PromptBlock] = [
+ VariablePromptBlock(input_variable="required_var1"),
+ VariablePromptBlock(input_variable="required_var2"),
+ RichTextPromptBlock(
+ blocks=[
+ PlainTextPromptBlock(text="Some text"),
+ VariablePromptBlock(input_variable="required_var3"),
+ ],
+ ),
+ JinjaPromptBlock(template="Template without variables"),
+ ChatMessagePromptBlock(
+ chat_role="USER",
+ blocks=[
+ RichTextPromptBlock(
+ blocks=[
+ PlainTextPromptBlock(text="Nested text"),
+ VariablePromptBlock(input_variable="required_var4"),
+ ],
+ ),
+ ],
+ ),
+ ]
+
+ # GIVEN a BaseInlinePromptNode
+ class TestNode(BaseInlinePromptNode):
+ ml_model = "test-model"
+ blocks = test_blocks
+ prompt_inputs = {
+ "required_var1": "value1",
+ # required_var2 is missing
+ # required_var3 is missing
+ # required_var4 is missing
+ }
+
+ # WHEN the node is run
+ node = TestNode()
+ with pytest.raises(NodeException) as excinfo:
+ list(node.run())
+
+ # THEN the node raises the correct NodeException
+ assert excinfo.value.code == WorkflowErrorCode.INVALID_INPUTS
+ assert "required_var2" in str(excinfo.value)
+ assert "required_var3" in str(excinfo.value)
+ assert "required_var4" in str(excinfo.value)
+
+
+ def test_validation_with_all_variables_provided(vellum_adhoc_prompt_client):
+ """Test that validation passes when all variables are provided."""
+ test_blocks: List[PromptBlock] = [
+ VariablePromptBlock(input_variable="required_var1"),
+ VariablePromptBlock(input_variable="required_var2"),
+ RichTextPromptBlock(
+ blocks=[
+ PlainTextPromptBlock(text="Some text"),
+ VariablePromptBlock(input_variable="required_var3"),
+ ],
+ ),
+ JinjaPromptBlock(template="Template without variables"),
+ ChatMessagePromptBlock(
+ chat_role="USER",
+ blocks=[
+ RichTextPromptBlock(
+ blocks=[
+ PlainTextPromptBlock(text="Nested text"),
+ VariablePromptBlock(input_variable="required_var4"),
+ ],
+ ),
+ ],
+ ),
+ ]
+
+ # GIVEN a BaseInlinePromptNode
+ class TestNode(BaseInlinePromptNode):
+ ml_model = "test-model"
+ blocks = test_blocks
+ prompt_inputs = {
+ "required_var1": "value1",
+ "required_var2": "value2",
+ "required_var3": "value3",
+ "required_var4": "value4",
+ }
+
+ expected_outputs: List[PromptOutput] = [
+ StringVellumValue(value="Test response"),
+ ]
+
+ def generate_prompt_events(*args: Any, **kwargs: Any) -> Iterator[ExecutePromptEvent]:
+ execution_id = str(uuid4())
+ events: List[ExecutePromptEvent] = [
+ InitiatedExecutePromptEvent(execution_id=execution_id),
+ FulfilledExecutePromptEvent(
+ execution_id=execution_id,
+ outputs=expected_outputs,
+ ),
+ ]
+ yield from events
+
+ vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.side_effect = generate_prompt_events
+
+ # WHEN the node is run
+ node = TestNode()
+ list(node.run())
+
+ # THEN the prompt is executed with the correct inputs
+ mock_api = vellum_adhoc_prompt_client.adhoc_execute_prompt_stream
+ assert mock_api.call_count == 1
+ assert mock_api.call_args.kwargs["input_values"] == [
+ PromptRequestStringInput(key="required_var1", type="STRING", value="value1"),
+ PromptRequestStringInput(key="required_var2", type="STRING", value="value2"),
+ PromptRequestStringInput(key="required_var3", type="STRING", value="value3"),
+ PromptRequestStringInput(key="required_var4", type="STRING", value="value4"),
+ ]
+
+
+ def test_validation_with_extra_variables(vellum_adhoc_prompt_client):
+ """Test that validation passes when extra variables are provided."""
+ test_blocks: List[PromptBlock] = [
+ VariablePromptBlock(input_variable="required_var"),
+ ]
+
+ # GIVEN a BaseInlinePromptNode
+ class TestNode(BaseInlinePromptNode):
+ ml_model = "test-model"
+ blocks = test_blocks
+ prompt_inputs = {
+ "required_var": "value",
+ "extra_var": "extra_value", # This is not required
+ }
+
+ expected_outputs: List[PromptOutput] = [
+ StringVellumValue(value="Test response"),
+ ]
+
+ def generate_prompt_events(*args: Any, **kwargs: Any) -> Iterator[ExecutePromptEvent]:
+ execution_id = str(uuid4())
+ events: List[ExecutePromptEvent] = [
+ InitiatedExecutePromptEvent(execution_id=execution_id),
+ FulfilledExecutePromptEvent(
+ execution_id=execution_id,
+ outputs=expected_outputs,
+ ),
+ ]
+ yield from events
+
+ vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.side_effect = generate_prompt_events
+
+ # WHEN the node is run
+ node = TestNode()
+ list(node.run())
+
+ # THEN the prompt is executed with the correct inputs
+ mock_api = vellum_adhoc_prompt_client.adhoc_execute_prompt_stream
+ assert mock_api.call_count == 1
+ assert mock_api.call_args.kwargs["input_values"] == [
+ PromptRequestStringInput(key="required_var", type="STRING", value="value"),
+ PromptRequestStringInput(key="extra_var", type="STRING", value="extra_value"),
+ ]
vellum/workflows/nodes/displayable/inline_prompt_node/node.py CHANGED
@@ -1,6 +1,7 @@
  import json
- from typing import Iterator
+ from typing import Any, Dict, Iterator, Type, Union

+ from vellum.workflows.constants import undefined
  from vellum.workflows.errors import WorkflowErrorCode
  from vellum.workflows.exceptions import NodeException
  from vellum.workflows.nodes.displayable.bases import BaseInlinePromptNode as BaseInlinePromptNode
@@ -30,9 +31,11 @@ class InlinePromptNode(BaseInlinePromptNode[StateType]):
  The outputs of the InlinePromptNode.

  text: str - The result of the Prompt Execution
+ json: Optional[Dict[Any, Any]] - The result of the Prompt Execution in JSON format
  """

  text: str
+ json: Union[Dict[Any, Any], Type[undefined]] = undefined

  def run(self) -> Iterator[BaseOutput]:
  outputs = yield from self._process_prompt_event_stream()
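A rough sketch of the new output (the model name, block, and input are placeholder assumptions, not taken from the release): the `json` output is referenced like the existing `text` output and stays `undefined` when the prompt result is not parseable JSON.

from vellum import VariablePromptBlock
from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode


class ExtractJson(InlinePromptNode):
    ml_model = "example-model"  # assumed identifier for illustration
    blocks = [VariablePromptBlock(input_variable="question")]
    prompt_inputs = {"question": "Return a JSON object describing the weather."}


# Downstream nodes can reference ExtractJson.Outputs.json alongside ExtractJson.Outputs.text.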
vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py CHANGED
@@ -27,6 +27,7 @@ from vellum import (
  StringVellumValue,
  VellumAudio,
  VellumError,
+ VellumImage,
  )
  from vellum.prompts.blocks.compilation import compile_prompt_blocks
  from vellum.prompts.blocks.types import CompiledChatMessagePromptBlock
@@ -202,7 +203,7 @@ class OpenAIChatCompletionNode(InlinePromptNode[StateType]):
  json_content_item: ChatCompletionContentPartTextParam = {"type": "text", "text": json.dumps(json_value)}
  content.append(json_content_item)
  elif block.content.type == "IMAGE":
- image_value = cast(VellumAudio, block.content.value)
+ image_value = cast(VellumImage, block.content.value)
  image_content_item: ChatCompletionContentPartImageParam = {
  "type": "image_url",
  "image_url": {"url": image_value.src},
@@ -251,6 +252,11 @@ class OpenAIChatCompletionNode(InlinePromptNode[StateType]):
  }

  content.append(audio_content_item)
+ elif block.content.type == "DOCUMENT":
+ raise NodeException(
+ code=WorkflowErrorCode.PROVIDER_ERROR,
+ message="Document chat message content type is not currently supported",
+ )
  else:
  raise NodeException(
  code=WorkflowErrorCode.INTERNAL_ERROR,
vellum/workflows/utils/tests/test_vellum_variables.py CHANGED
@@ -1,7 +1,7 @@
  import pytest
  from typing import List, Optional

- from vellum import ChatMessage, SearchResult
+ from vellum import ChatMessage, SearchResult, VellumAudio, VellumDocument, VellumImage
  from vellum.workflows.types.core import Json
  from vellum.workflows.utils.vellum_variables import primitive_type_to_vellum_variable_type

@@ -21,6 +21,12 @@ from vellum.workflows.utils.vellum_variables import primitive_type_to_vellum_var
  (Optional[List[SearchResult]], "SEARCH_RESULTS"),
  (Json, "JSON"),
  (Optional[Json], "JSON"),
+ (VellumDocument, "DOCUMENT"),
+ (Optional[VellumDocument], "DOCUMENT"),
+ (VellumAudio, "AUDIO"),
+ (Optional[VellumAudio], "AUDIO"),
+ (VellumImage, "IMAGE"),
+ (Optional[VellumImage], "IMAGE"),
  ],
  )
  def test_primitive_type_to_vellum_variable_type(type_, expected):
vellum/workflows/utils/vellum_variables.py CHANGED
@@ -10,6 +10,8 @@ from vellum import (
  SearchResultRequest,
  VellumAudio,
  VellumAudioRequest,
+ VellumDocument,
+ VellumDocumentRequest,
  VellumError,
  VellumErrorRequest,
  VellumImage,
@@ -62,6 +64,8 @@ def primitive_type_to_vellum_variable_type(type_: Union[Type, BaseDescriptor]) -
  return "IMAGE"
  elif _is_type_optionally_in(type_, (VellumAudio, VellumAudioRequest)):
  return "AUDIO"
+ elif _is_type_optionally_in(type_, (VellumDocument, VellumDocumentRequest)):
+ return "DOCUMENT"
  elif _is_type_optionally_in(type_, (VellumError, VellumErrorRequest)):
  return "ERROR"
  elif _is_type_optionally_in(type_, (List[ChatMessage], List[ChatMessageRequest])):
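The test parametrization above confirms the new mapping; a one-line check of the same behavior:

from vellum import VellumDocument
from vellum.workflows.utils.vellum_variables import primitive_type_to_vellum_variable_type

assert primitive_type_to_vellum_variable_type(VellumDocument) == "DOCUMENT"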
{vellum_ai-0.14.16.dist-info → vellum_ai-0.14.17.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 0.14.16
+ Version: 0.14.17
  Summary:
  License: MIT
  Requires-Python: >=3.9,<4.0
{vellum_ai-0.14.16.dist-info → vellum_ai-0.14.17.dist-info}/RECORD CHANGED
@@ -24,22 +24,22 @@ vellum_ee/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
  vellum_ee/workflows/display/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum_ee/workflows/display/base.py,sha256=ak29FIsawhaFa9_paZUHThlZRFJ1xB486JWKuSt1PYY,1965
  vellum_ee/workflows/display/nodes/__init__.py,sha256=436iSAh_Ex5tC68oEYvNgPu05ZVIAVXnS4PKGrQeZ0Y,321
- vellum_ee/workflows/display/nodes/base_node_display.py,sha256=pYmIdmC2dxLyRlZEBlgARIJMLcBX3GxZPpWZISv5Bbg,17170
+ vellum_ee/workflows/display/nodes/base_node_display.py,sha256=H7LJGtGWFkypyNhIrIhHoGYXS0hRYcP7aoYHlM90f_U,17395
  vellum_ee/workflows/display/nodes/base_node_vellum_display.py,sha256=ZLKQ8Xa3h9nGkj4t4V_7OeU7CBFWY3gXB9CkaCLOhEk,2699
- vellum_ee/workflows/display/nodes/get_node_display_class.py,sha256=RWCGm29-Tabi-qgVEIi_sdTWBv4bEzxAgwAoYI5T-Cc,2566
+ vellum_ee/workflows/display/nodes/get_node_display_class.py,sha256=LhV2wSo07iOTUj1clKwH2zzCzKdLiW2gk22R3Qco81E,2196
  vellum_ee/workflows/display/nodes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum_ee/workflows/display/nodes/tests/test_base_node_display.py,sha256=QqR3Ly0RNrXwOeLdW5nERDFt0gRPf76n1bPES6o5UN4,1093
  vellum_ee/workflows/display/nodes/types.py,sha256=St1BB6no528OyELGiyRabWao0GGw6mLhstQAvEACbGk,247
  vellum_ee/workflows/display/nodes/utils.py,sha256=sloya5TpXsnot1HURc9L51INwflRqUzHxRVnCS9Cd-4,973
  vellum_ee/workflows/display/nodes/vellum/__init__.py,sha256=nUIgH2s0-7IbQRNrBhLPyRNe8YIrx3Yo9HeeW-aXXFk,1668
  vellum_ee/workflows/display/nodes/vellum/api_node.py,sha256=hoV-cUtS6H9kmRQXHd2py95GRWI_dAnnaPwvlNBkDOQ,8571
- vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py,sha256=t-KSaai6cXRLkNkzCvEbM5SHh03B9fqPwVTH-Gei_-0,4419
+ vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py,sha256=oJAQrAm5iFQq0_fX94sMbS3RQEK1M1VsoUck4vsPs9A,5820
  vellum_ee/workflows/display/nodes/vellum/code_execution_node.py,sha256=z00Z3L0d4PsUQo4S8FRDTtOFLtjdi17TJbatNVF4nM8,4288
  vellum_ee/workflows/display/nodes/vellum/conditional_node.py,sha256=ybLIa4uclqVIy3VAQvI1ivg2tnK5Ug_1R5a69DFqL7E,11104
  vellum_ee/workflows/display/nodes/vellum/error_node.py,sha256=I1Jkp2htRINJATtv1e-zs9BrReFX842djpiVgBPHDYg,2186
  vellum_ee/workflows/display/nodes/vellum/final_output_node.py,sha256=p-PvlnxpBQ7IKskZi2A19jKAtKnSxJ8LPbGMA83VkFk,2805
  vellum_ee/workflows/display/nodes/vellum/guardrail_node.py,sha256=aYZSJTxknU4LMiQdWk9LcK6CkhdozeDEMiRxfAyUNEc,2202
- vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=aNZhjw5CwpUO8IcLJ2nhYrzn96RJ3FWeJXdfDizuPzw,8491
+ vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=CPNW7HtKKJCGOvvE9DTn-56XVDLgAcBg2QHTfqU4N7k,8543
  vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py,sha256=MU9I8CB1X1TgL1aa1eT6DHWwNJ-2v79t74xl0oy-fBo,5510
  vellum_ee/workflows/display/nodes/vellum/map_node.py,sha256=8CPnn06HIBxBOiECevUffeVmQmCpec6WtPQnNl9gj9Y,3748
  vellum_ee/workflows/display/nodes/vellum/merge_node.py,sha256=HkNMgdQELiON42jdO-xDLmqrEKdGx1RVqrz2DXNTLS8,3239
@@ -93,7 +93,7 @@ vellum_ee/workflows/display/vellum.py,sha256=7mqQaKZPPrLMcXSAQkPIxCy5x8HkKs5PbCu
  vellum_ee/workflows/display/workflows/__init__.py,sha256=kapXsC67VJcgSuiBMa86FdePG5A9kMB5Pi4Uy1O2ob4,207
  vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=QKMSmV--UoE8L7pYmKvZqtaatygrxmSm9PDSH71Yr0Y,19937
  vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py,sha256=kp0u8LN_2IwshLrhMImhpZx1hRyAcD5gXY-kDuuaGMQ,1269
- vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=TzLziIh1fJ429LRUYoe88yd_HZflEQAw60TW8PAvvnk,6917
+ vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=eCBCssh2J0hhGAKiv5pVxYKxMEUQCtFX-FKn49xercA,8576
  vellum_ee/workflows/display/workflows/vellum_workflow_display.py,sha256=mbAzCpswOek34ITeTkesbVreCXpulj4NFjIg3RcdVZ8,18243
  vellum_ee/workflows/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum_ee/workflows/server/virtual_file_loader.py,sha256=X_DdNK7MfyOjKWekk6YQpOSCT6klKcdjT6nVJcBH1sM,1481
@@ -119,12 +119,12 @@ vellum_ee/workflows/tests/local_workflow/workflow.py,sha256=A4qOzOPNwePYxWbcAgIP
  vellum_ee/workflows/tests/test_display_meta.py,sha256=xLQ7QtIXIiIm61pm2lyl588Ohzc3pjyq1nDp-qVu2Fs,2295
  vellum_ee/workflows/tests/test_server.py,sha256=M6vvQ2hjIpDWtQdDM9EPbMvUrZ93niAuYnxMNJWOjPA,511
  vellum_ee/workflows/tests/test_virtual_files.py,sha256=TJEcMR0v2S8CkloXNmCHA0QW0K6pYNGaIjraJz7sFvY,2762
- vellum/__init__.py,sha256=a_aM1_A04XGma4MAIDNeBF9BKzWbiQaVVMRzImHuxjA,36438
+ vellum/__init__.py,sha256=Vh9jxpBVXB2fTCyoUMlLdGL1Ujf0zNNNvqhNu5II8KI,36466
  vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
  vellum/client/__init__.py,sha256=tKtdM1_GqmGq1gpi9ydWD_T-MM7fPn8QdHh8ww19cNI,117564
  vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
  vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
- vellum/client/core/client_wrapper.py,sha256=0XmYpWbtLyEE04hBfhFVCncXeuj0BKKSGnpz2iz-nyA,1869
+ vellum/client/core/client_wrapper.py,sha256=lp2PrDTuF-fVxozBobWF7i3EJRnKs7ggqOiSWj5wuL4,1869
  vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
  vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
  vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -189,7 +189,7 @@ vellum/client/resources/workspace_secrets/__init__.py,sha256=FTtvy8EDg9nNNg9WCat
  vellum/client/resources/workspace_secrets/client.py,sha256=h7UzXLyTttPq1t-JZGMg1BWxypxJvBGUdqg7KGT7MK4,8027
  vellum/client/resources/workspaces/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  vellum/client/resources/workspaces/client.py,sha256=RthwzN1o-Jxwg5yyNNodavFyNUSxfLoTv26w3mRR5g8,3595
- vellum/client/types/__init__.py,sha256=_1pPNxQxjYSBB4L-j9HM1CHRaOEQNZa6XekVrKrseqg,54850
+ vellum/client/types/__init__.py,sha256=D6j2QfxOV3WULH0inmz9q-gdOQcytrr6_R8WYaC5ZZk,54894
  vellum/client/types/ad_hoc_execute_prompt_event.py,sha256=bCjujA2XsOgyF3bRZbcEqV2rOIymRgsLoIRtZpB14xg,607
  vellum/client/types/ad_hoc_expand_meta.py,sha256=1gv-NCsy_6xBYupLvZH979yf2VMdxAU-l0y0ynMKZaw,1331
  vellum/client/types/ad_hoc_fulfilled_prompt_execution_meta.py,sha256=Bfvf1d_dkmshxRACVM5vcxbH_7AQY23RmrrnPc0ytYY,939
@@ -482,6 +482,7 @@ vellum/client/types/rejected_execute_prompt_response.py,sha256=YyY9KGTbJ53DmwiXH
  vellum/client/types/rejected_execute_workflow_workflow_result_event.py,sha256=6k6GRr2TnVSM0DxS3H3hczwV7962UDG92yLh6j8OjcM,882
  vellum/client/types/rejected_prompt_execution_meta.py,sha256=4Oidh3XBBHzgjOys2kuETxqWc_MOqS50UpB0o8YNZd8,844
  vellum/client/types/rejected_workflow_node_result_event.py,sha256=o9AUc9hT60F8ckMkCx8HtKxtj4y82LBoJduB9u8w0TM,1319
+ vellum/client/types/release.py,sha256=lfB3zKIS8UIc19Lb7zAOEAHCvuN7wW23aXEkZMio5-Q,597
  vellum/client/types/release_tag_source.py,sha256=YavosOXZ976yfXTNWRTZwh2HhRiYmSDk0bQCkl-jCoQ,158
  vellum/client/types/replace_test_suite_test_case_request.py,sha256=c1GT1RUCei1yWxyZy4Gv40PkXYisvK5OkzlqQ6WeBYA,1906
  vellum/client/types/rich_text_child_block.py,sha256=X_ACKFKSUx5SXT1cLp0Y5-7VrNxcGOggPm67Lk2442U,270
@@ -672,7 +673,7 @@ vellum/client/types/workflow_output_string.py,sha256=_jclzbQ-Wlf-7FEVTWXhs9h5FWf
  vellum/client/types/workflow_push_deployment_config_request.py,sha256=pG6bZtlw7S0TcXtNRQNa7y_2NodZe7dp5SchIrgRUVU,745
  vellum/client/types/workflow_push_exec_config.py,sha256=6TaVMVqhSOz4DnY46l8axPDtytSioXDl9nHvFXSxH1g,94
  vellum/client/types/workflow_push_response.py,sha256=1vUSZmZ1GK1242dAkNwJnJI0rL3pBT3_0HOLLjdiutw,724
- vellum/client/types/workflow_release_tag_read.py,sha256=hevIvlmqfWnZWBLXAcXC7jxXQfnG4YUaV13DIvBycPQ,1155
+ vellum/client/types/workflow_release_tag_read.py,sha256=M-zNVQw7h0t9uzNSkp7Xl3SESERacqALu3mGemNpKRg,1263
  vellum/client/types/workflow_release_tag_workflow_deployment_history_item.py,sha256=pjWobdk9mZD3Px86rwFHfs_PYJBGXDKQUkxsgNEe6EA,825
  vellum/client/types/workflow_request_chat_history_input_request.py,sha256=WCZvwDuNS8ylWOOoKD3t7fHLSYB0h-fVCqeDRzqPoPA,898
  vellum/client/types/workflow_request_input_request.py,sha256=wgbKgKy-ftTzc6VMsgPkIiHaAujSiJjLizp2GfksX-A,632
@@ -725,7 +726,7 @@ vellum/plugins/utils.py,sha256=cPmxE9R2CK1bki2jKE8rB-G9zMf2pzHjSPDHFPXwd3Q,878
  vellum/plugins/vellum_mypy.py,sha256=QTuMSq6PiZW1dyTUZ5Bf1d4XkgFj0TKAgZLP8f4UgL4,27914
  vellum/prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/prompts/blocks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- vellum/prompts/blocks/compilation.py,sha256=lpGVuquFaHnGglON7-S8fIHgUIZyo6TdXOmBtG_Si-0,9497
+ vellum/prompts/blocks/compilation.py,sha256=qeC_4La5auQkm4EyzCMpN34F5R8mjiGcLV7IxKgVf3k,9973
  vellum/prompts/blocks/exceptions.py,sha256=vmk5PV6Vyw9nKjZYQDUDW0LH8MfQNIgFvFb_mFWdIRI,50
  vellum/prompts/blocks/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/prompts/blocks/tests/test_compilation.py,sha256=EOUtdzJDFGbGhoc_y5XTMyO0HOpOM7FYJssPzd_yRVg,5235
@@ -1073,6 +1074,7 @@ vellum/types/rejected_execute_prompt_response.py,sha256=hznSDSGod3eOrTS6Z-8EUDnG
  vellum/types/rejected_execute_workflow_workflow_result_event.py,sha256=zyaUrnK5FRxkK8iQ71Il8exrNjC6W6lWsleJk6SSBxM,185
  vellum/types/rejected_prompt_execution_meta.py,sha256=ev1iT8IFkHn7YGg04DHdOGinBHQfIaKa0sEa2yuOjTw,168
  vellum/types/rejected_workflow_node_result_event.py,sha256=RbaaRh7xZUueiyOzfPZqwfYiRdRgeudbvk8jhqmBrew,173
+ vellum/types/release.py,sha256=HNZgTcYeUxHyI25_6YncMzEvlswJ1zVESJ82bKO04Ak,145
  vellum/types/release_tag_source.py,sha256=71AVUOydv6hauj65Wj5l2qBa35jTLTheLpMIsgz3noI,156
  vellum/types/replace_test_suite_test_case_request.py,sha256=qMI4-4s_EXTOo29mZR14opoOTPp5_zIylPJU5ZPCvAc,174
  vellum/types/rich_text_child_block.py,sha256=W8xJl-2DOKrpht1-Dx9EcVa4XDO5t7CS1B5Zt_bS9ow,159
@@ -1363,8 +1365,8 @@ vellum/workflows/inputs/tests/test_inputs.py,sha256=g--YqWTNWzMk5Ktoj__gq988kvBR
  vellum/workflows/logging.py,sha256=_a217XogktV4Ncz6xKFz7WfYmZAzkfVRVuC0rWob8ls,437
  vellum/workflows/nodes/__init__.py,sha256=aVdQVv7Y3Ro3JlqXGpxwaU2zrI06plDHD2aumH5WUIs,1157
  vellum/workflows/nodes/bases/__init__.py,sha256=cniHuz_RXdJ4TQgD8CBzoiKDiPxg62ErdVpCbWICX64,58
- vellum/workflows/nodes/bases/base.py,sha256=Y5xv0tFSSDafKDhVsxC5xTNrH15sj93jok6POzgWO0E,15351
- vellum/workflows/nodes/bases/base_adornment_node.py,sha256=eFTgsPCYb3eyGS0-kw7C6crFnwFx437R5wh9-8bWYts,2905
+ vellum/workflows/nodes/bases/base.py,sha256=eW-3RSkBgtuGY8x2nmbHYiUg_HXS5U57n3k6Fh-dJ9s,15330
+ vellum/workflows/nodes/bases/base_adornment_node.py,sha256=afMwJLHK2Ke7sBpceVLnNdZMlU2O-6UgyG7lBt9SAQ8,3039
  vellum/workflows/nodes/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/bases/tests/test_base_node.py,sha256=4SOdZzvugVtN8CIuo5RrapAxSYGXnxUwQ77dXJ64oTU,6295
  vellum/workflows/nodes/core/__init__.py,sha256=5zDMCmyt1v0HTJzlUBwq3U9L825yZGZhT9JL18-mRR4,455
@@ -1379,14 +1381,14 @@ vellum/workflows/nodes/core/map_node/node.py,sha256=dY27Xm11LHsqD7hnZnVYYDIazZ-X
  vellum/workflows/nodes/core/map_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/core/map_node/tests/test_node.py,sha256=uMR0AyIFn539LqTKHdwuBswnx1i-PHyqPpgtYrnmYMY,3496
  vellum/workflows/nodes/core/retry_node/__init__.py,sha256=lN2bIy5a3Uzhs_FYCrooADyYU6ZGShtvLKFWpelwPvo,60
- vellum/workflows/nodes/core/retry_node/node.py,sha256=Vt3fx4G-DRIb9a-IHIUfaAclgfbzOPEQVkcumwhl9HE,4355
+ vellum/workflows/nodes/core/retry_node/node.py,sha256=eRfDj54idq4JXkzgFyHNh18qnEMI6vLOtNgn7MsFg1o,4359
  vellum/workflows/nodes/core/retry_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/core/retry_node/tests/test_node.py,sha256=fNgDufkIsrTC-6ftvogqSpWhqqBj9iNESdfK19B1Yx0,5159
  vellum/workflows/nodes/core/templating_node/__init__.py,sha256=GmyuYo81_A1_Bz6id69ozVFS6FKiuDsZTiA3I6MaL2U,70
  vellum/workflows/nodes/core/templating_node/node.py,sha256=-JIqLUv6Xpx_LTVZt7whQ2X2VatgHDdTxjMrz64luEs,3721
  vellum/workflows/nodes/core/templating_node/tests/test_templating_node.py,sha256=MHofz-BwAgt7EXkab8VIyacYznDEIJ7Er7MJUaxNQQo,9614
  vellum/workflows/nodes/core/try_node/__init__.py,sha256=JVD4DrldTIqFQQFrubs9KtWCCc0YCAc7Fzol5ZWIWeM,56
- vellum/workflows/nodes/core/try_node/node.py,sha256=5ux1l2HO12FBFFyhz6j-4yfBYVrqgT2maTAne_GnNDk,4434
+ vellum/workflows/nodes/core/try_node/node.py,sha256=RbxL0NRXS0IxRP0MJAnLABolF6dkwVniiqsagzy-lwk,4445
  vellum/workflows/nodes/core/try_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/core/try_node/tests/test_node.py,sha256=h6eUc3SggvhzBWlOD0PrPUlkoCSQHwjqYn81VkxSIxU,4948
  vellum/workflows/nodes/displayable/__init__.py,sha256=6F_4DlSwvHuilWnIalp8iDjjDXl0Nmz4QzJV2PYe5RI,1023
@@ -1398,10 +1400,12 @@ vellum/workflows/nodes/displayable/bases/__init__.py,sha256=0mWIx3qUrzllV7jqt7wN
  vellum/workflows/nodes/displayable/bases/api_node/__init__.py,sha256=1jwx4WC358CLA1jgzl_UD-rZmdMm2v9Mps39ndwCD7U,64
  vellum/workflows/nodes/displayable/bases/api_node/node.py,sha256=Ev_So7D_4Qfvl2_E8veVfxAxWfbJIA2ujyW5istLg5I,4066
  vellum/workflows/nodes/displayable/bases/base_prompt_node/__init__.py,sha256=Org3xTvgp1pA0uUXFfnJr29D3HzCey2lEdYF4zbIUgo,70
- vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py,sha256=nvhoWb8EyRlgtyotYp-wh194n30yQP81UnOH_a8FghY,3140
+ vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py,sha256=HGNoGLJ9lbqflGdYFDIiuHFyi0iJ-agJu4kkJ7D3dGs,3212
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/__init__.py,sha256=Hl35IAoepRpE-j4cALaXVJIYTYOF3qszyVbxTj4kS1s,82
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/constants.py,sha256=fnjiRWLoRlC4Puo5oQcpZD5Hd-EesxsAo9l5tGAkpZQ,270
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=FBE3_1F3WwgEKK28afD73cw2Zbm3BOk7SMpXuxDmYPw,7117
+ vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=fTQ7KS5u7xCO_nJsWT5_LWgwGnYZg4U06slvXxHAzTU,8449
+ vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py,sha256=kodL6T6booHtdTLMJ9wSk_JHFgPNkTz3U8AJDB2cxtU,6696
  vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py,sha256=UIyNUUr8ii6rIx1gQL5vIPqTAww4ExxxJkFMoglc8LE,5794
  vellum/workflows/nodes/displayable/bases/search_node.py,sha256=3UtbqY3QO4kzfJHbmUNZGnEEfJmaoiF892u8H6TGjp8,5381
  vellum/workflows/nodes/displayable/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1424,7 +1428,7 @@ vellum/workflows/nodes/displayable/final_output_node/tests/test_node.py,sha256=E
  vellum/workflows/nodes/displayable/guardrail_node/__init__.py,sha256=Ab5eXmOoBhyV4dMWdzh32HLUmnPIBEK_zFCT38C4Fng,68
  vellum/workflows/nodes/displayable/guardrail_node/node.py,sha256=h5nIBzQxbXTrdTq1wjDcekk1RV4-rKUNCshqdBAiJJY,4025
  vellum/workflows/nodes/displayable/inline_prompt_node/__init__.py,sha256=gSUOoEZLlrx35-tQhSAd3An8WDwBqyiQh-sIebLU9wU,74
- vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=UWOxIa3kizhDa5joAmEDlh0P66EaqatqxzLLbvbUybw,2294
+ vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=BaBfIfQX10sfoJ03H6zAnheae8bcJgi6Ff3SkYvmCKo,2523
  vellum/workflows/nodes/displayable/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py,sha256=Qg1oGXecGH2Hp9oBAY42bTfKHBvNHHoJ6vUPPEj8Lq0,8539
  vellum/workflows/nodes/displayable/merge_node/__init__.py,sha256=J8IC08dSH7P76wKlNuxe1sn7toNGtSQdFirUbtPDEs0,60
@@ -1450,7 +1454,7 @@ vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha
  vellum/workflows/nodes/experimental/README.md,sha256=eF6DfIL8t-HbF9-mcofOMymKrraiBHDLKTlnBa51ZiE,284
  vellum/workflows/nodes/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
- vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py,sha256=1EGeiaT-Zoo6pttQFKKBcdf3dmhAbjKGaErYD5FFwlc,10185
+ vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py,sha256=cKI2Ls25L-JVt4z4a2ozQa-YBeVy21Z7BQ32Sj7iBPE,10460
  vellum/workflows/nodes/mocks.py,sha256=a1FjWEIocseMfjzM-i8DNozpUsaW0IONRpZmXBoWlyc,10455
  vellum/workflows/nodes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/tests/test_mocks.py,sha256=mfPvrs75PKcsNsbJLQAN6PDFoVqs9TmQxpdyFKDdO60,7837
@@ -1503,9 +1507,9 @@ vellum/workflows/utils/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
  vellum/workflows/utils/tests/test_functions.py,sha256=ytdruS55aO2egsb5sAv8_9jf68L1cJoZu2uKV2iamrg,8083
  vellum/workflows/utils/tests/test_names.py,sha256=aOqpyvMsOEK_9mg_-yaNxQDW7QQfwqsYs37PseyLhxw,402
  vellum/workflows/utils/tests/test_uuids.py,sha256=i77ABQ0M3S-aFLzDXHJq_yr5FPkJEWCMBn1HJ3DObrE,437
- vellum/workflows/utils/tests/test_vellum_variables.py,sha256=6H-BpmbIEmVRO75QQ3Rfy4bEUMMP2qwGzx2Gp1uXbfw,879
+ vellum/workflows/utils/tests/test_vellum_variables.py,sha256=maI5e7Od7UlpMwlrOrcdlXqnFhonkXGnWq8G2-YQLi8,1155
  vellum/workflows/utils/uuids.py,sha256=DFzPv9RCvsKhvdTEIQyfSek2A31D6S_QcmeLPbgrgTY,739
- vellum/workflows/utils/vellum_variables.py,sha256=fC2aSLvlS31D15dOWu43LBRR0QsgUKNXBiCUvvaLXSs,3231
+ vellum/workflows/utils/vellum_variables.py,sha256=UiGlUh0a8vel2FbW3w-xbHxSv_jNutkDdqMVtP_b42A,3385
  vellum/workflows/vellum_client.py,sha256=GxOy3dX6A04xiY69vPv1S4YGuQ_TMxwHi6WRMimQBBE,762
  vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
  vellum/workflows/workflows/base.py,sha256=TSS2BHC8LAi-N5GdEa75BeChwzwTzL7yldFnTlLINro,22665
@@ -1513,8 +1517,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
  vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/workflows/tests/test_base_workflow.py,sha256=NRteiICyJvDM5zrtUfq2fZoXcGQVaWC9xmNlLLVW0cU,7979
  vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
- vellum_ai-0.14.16.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
- vellum_ai-0.14.16.dist-info/METADATA,sha256=yxCtedPJiwtcov54unW-4jIJErky44J7Mhs3BEedjW8,5408
- vellum_ai-0.14.16.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- vellum_ai-0.14.16.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
- vellum_ai-0.14.16.dist-info/RECORD,,
+ vellum_ai-0.14.17.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+ vellum_ai-0.14.17.dist-info/METADATA,sha256=DPUjnu7ydH6sKPprPiI_A0qW2RVB6JwGh5LV0DUZwk4,5408
+ vellum_ai-0.14.17.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ vellum_ai-0.14.17.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+ vellum_ai-0.14.17.dist-info/RECORD,,
vellum_ee/workflows/display/nodes/base_node_display.py CHANGED
@@ -59,7 +59,16 @@ _NodeDisplayAttrType = TypeVar("_NodeDisplayAttrType")

  class BaseNodeDisplayMeta(type):
  def __new__(mcs, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
- cls = super().__new__(mcs, name, bases, dct)
+ cls = cast(Type["BaseNodeDisplay"], super().__new__(mcs, name, bases, dct))
+
+ if not dct.get("output_display"):
+ node_class = cls.infer_node_class()
+ cls.output_display = {
+ ref: NodeOutputDisplay(id=node_class.__output_ids__[ref.name], name=ref.name)
+ for ref in node_class.Outputs
+ if ref.name in node_class.__output_ids__
+ }
+
  return cls.__annotate_node__()

  def __annotate_node__(cls):
@@ -73,22 +82,36 @@ class BaseNodeDisplayMeta(type):
  # Display classes are able to override the id of the node class it's parameterized by
  node_class.__id__ = display_node_id

- output_display = getattr(cls, "output_display", None)
- if isinstance(output_display, dict):
- # And the node class' output ids
- for reference, node_output_display in output_display.items():
- if not isinstance(reference, OutputReference):
- continue
- if not isinstance(node_output_display, NodeOutputDisplay):
- continue
+ for reference, node_output_display in base_node_display_class.output_display.items():
+ if not isinstance(reference, OutputReference):
+ continue
+ if not isinstance(node_output_display, NodeOutputDisplay):
+ continue

- node_class.__output_ids__[reference.name] = node_output_display.id
+ node_class.__output_ids__[reference.name] = node_output_display.id

  return cls

+ def infer_node_class(cls) -> Type[BaseNode]:
+ original_base = get_original_base(cls)
+ node_class = get_args(original_base)[0]
+ if isinstance(node_class, TypeVar):
+ bounded_class = node_class.__bound__
+ if inspect.isclass(bounded_class) and issubclass(bounded_class, BaseNode):
+ return bounded_class
+
+ if isinstance(bounded_class, ForwardRef) and bounded_class.__forward_arg__ == BaseNode.__name__:
+ return BaseNode
+
+ if issubclass(node_class, BaseNode):
+ return node_class
+
+ raise ValueError(f"Node {cls.__name__} must be a subclass of {BaseNode.__name__}")
+

  class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
- output_display: Dict[OutputReference, NodeOutputDisplay] = {}
+ # Default values set by the metaclass
+ output_display: Dict[OutputReference, NodeOutputDisplay]
  port_displays: Dict[Port, PortDisplayOverrides] = {}
  node_input_ids_by_name: ClassVar[Dict[str, UUID]] = {}

@@ -235,23 +258,6 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
  def get_from_node_display_registry(cls, node_class: Type[NodeType]) -> Optional[Type["BaseNodeDisplay"]]:
  return cls._node_display_registry.get(node_class)

- @classmethod
- def infer_node_class(cls) -> Type[NodeType]:
- original_base = get_original_base(cls)
- node_class = get_args(original_base)[0]
- if isinstance(node_class, TypeVar):
- bounded_class = node_class.__bound__
- if inspect.isclass(bounded_class) and issubclass(bounded_class, BaseNode):
- return cast(Type[NodeType], bounded_class)
-
- if isinstance(bounded_class, ForwardRef) and bounded_class.__forward_arg__ == BaseNode.__name__:
- return cast(Type[NodeType], BaseNode)
-
- if issubclass(node_class, BaseNode):
- return node_class
-
- raise ValueError(f"Node {cls.__name__} must be a subclass of {BaseNode.__name__}")
-
  @cached_property
  def node_id(self) -> UUID:
  """Can be overridden as a class attribute to specify a custom node id."""
@@ -264,7 +270,7 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):

  @property
  def _node(self) -> Type[NodeType]:
- return self.infer_node_class()
+ return cast(Type[NodeType], self.__class__.infer_node_class())

  @classmethod
  def _get_explicit_node_display_attr(
vellum_ee/workflows/display/nodes/get_node_display_class.py CHANGED
@@ -5,7 +5,6 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Type
  from vellum.workflows.descriptors.base import BaseDescriptor
  from vellum.workflows.types.generics import NodeType
  from vellum.workflows.utils.uuids import uuid4_from_hash
- from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay

  if TYPE_CHECKING:
  from vellum_ee.workflows.display.types import NodeDisplayType
@@ -43,14 +42,6 @@ def get_node_display_class(
  return {}

  def exec_body(ns: Dict):
- output_display = {
- ref: NodeOutputDisplay(id=node_class.__output_ids__[ref.name], name=ref.name)
- for ref in node_class.Outputs
- if ref.name in node_class.__output_ids__
- }
- if output_display:
- ns["output_display"] = output_display
-
  node_input_ids_by_name: Dict[str, UUID] = {}
  for ref in node_class:
  node_input_ids_by_name.update(_get_node_input_ids_by_ref(ref.name, ref.instance))
vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py CHANGED
@@ -3,7 +3,6 @@ import types
  from uuid import UUID
  from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar, cast

- from vellum.workflows.nodes.bases.base import BaseNode
  from vellum.workflows.nodes.bases.base_adornment_node import BaseAdornmentNode
  from vellum.workflows.nodes.utils import get_wrapped_node
  from vellum.workflows.types.core import JsonArray, JsonObject
@@ -12,6 +11,7 @@ from vellum.workflows.utils.uuids import uuid4_from_hash
  from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
  from vellum_ee.workflows.display.nodes.base_node_vellum_display import BaseNodeVellumDisplay
  from vellum_ee.workflows.display.nodes.get_node_display_class import get_node_display_class
+ from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay
  from vellum_ee.workflows.display.types import WorkflowDisplayContext

  _BaseAdornmentNodeType = TypeVar("_BaseAdornmentNodeType", bound=BaseAdornmentNode)
@@ -44,12 +44,17 @@ class BaseAdornmentNodeDisplay(BaseNodeVellumDisplay[_BaseAdornmentNodeType], Ge
  return serialized_wrapped_node

  @classmethod
- def wrap(cls, **kwargs: Any) -> Callable[..., Type[BaseNodeDisplay]]:
+ def wrap(cls, node_id: Optional[UUID] = None, **kwargs: Any) -> Callable[..., Type[BaseNodeDisplay]]:
  NodeDisplayType = TypeVar("NodeDisplayType", bound=BaseNodeDisplay)

- def decorator(inner_cls: Type[NodeDisplayType]) -> Type[NodeDisplayType]:
- node_class = inner_cls.infer_node_class()
- wrapped_node_class = cast(Type[BaseNode], node_class.__wrapped_node__)
+ def decorator(wrapped_node_display_class: Type[NodeDisplayType]) -> Type[NodeDisplayType]:
+ node_class = wrapped_node_display_class.infer_node_class()
+ if not issubclass(node_class, BaseAdornmentNode):
+ raise ValueError(f"Node {node_class.__name__} must be wrapped with a {BaseAdornmentNode.__name__}")
+
+ wrapped_node_class = node_class.__wrapped_node__
+ if not wrapped_node_class:
+ raise ValueError(f"Node {node_class.__name__} must be used as an adornment with the `wrap` method.")

  # `mypy` is wrong here, `cls` is indexable bc it's Generic
  BaseAdornmentDisplay = cls[node_class] # type: ignore[index]
@@ -58,36 +63,51 @@ class BaseAdornmentNodeDisplay(BaseNodeVellumDisplay[_BaseAdornmentNodeType], Ge
  for key, kwarg in kwargs.items():
  ns[key] = kwarg

- if "node_id" not in kwargs:
- ns["node_id"] = uuid4_from_hash(node_class.__qualname__)
+ ns["node_id"] = node_id or uuid4_from_hash(node_class.__qualname__)

  AdornmentDisplay = types.new_class(
  re.sub(r"^Base", "", cls.__name__), bases=(BaseAdornmentDisplay,), exec_body=exec_body
  )

- setattr(inner_cls, "__adorned_by__", AdornmentDisplay)
+ setattr(wrapped_node_display_class, "__adorned_by__", AdornmentDisplay)

  # We must edit the node display class to use __wrapped_node__ everywhere it
  # references the adorned node class, which is three places:

  # 1. The node display class' parameterized type
- original_base_node_display = get_original_base(inner_cls)
+ original_base_node_display = get_original_base(wrapped_node_display_class)
  original_base_node_display.__args__ = (wrapped_node_class,)
- inner_cls._node_display_registry[wrapped_node_class] = inner_cls
- inner_cls.__annotate_node__()
+ wrapped_node_display_class._node_display_registry[wrapped_node_class] = wrapped_node_display_class
+ wrapped_node_display_class.__annotate_node__()

  # 2. The node display class' output displays
- old_outputs = list(inner_cls.output_display.keys())
- for old_output in old_outputs:
- new_output = getattr(wrapped_node_class.Outputs, old_output.name)
- inner_cls.output_display[new_output] = inner_cls.output_display.pop(old_output)
+ for old_output in node_class.Outputs:
+ new_output = getattr(wrapped_node_class.Outputs, old_output.name, None)
+ if new_output is None:
+ # If the adornment is adding a new output, such as TryNode adding an "error" output,
+ # we skip it, since it should not be included in the adorned node's output displays
+ wrapped_node_display_class.output_display.pop(old_output, None)
+ continue
+
+ if old_output not in wrapped_node_display_class.output_display:
+ # If the adorned node doesn't have an output display defined for this output, we define one
+ wrapped_node_display_class.output_display[new_output] = NodeOutputDisplay(
+ id=wrapped_node_class.__output_ids__[old_output.name],
+ name=old_output.name,
+ )
+ else:
+ wrapped_node_display_class.output_display[new_output] = (
+ wrapped_node_display_class.output_display.pop(old_output)
+ )

  # 3. The node display class' port displays
- old_ports = list(inner_cls.port_displays.keys())
+ old_ports = list(wrapped_node_display_class.port_displays.keys())
  for old_port in old_ports:
  new_port = getattr(wrapped_node_class.Ports, old_port.name)
- inner_cls.port_displays[new_port] = inner_cls.port_displays.pop(old_port)
+ wrapped_node_display_class.port_displays[new_port] = wrapped_node_display_class.port_displays.pop(
+ old_port
+ )

- return inner_cls
+ return wrapped_node_display_class

  return decorator
vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py CHANGED
@@ -19,6 +19,7 @@ _InlinePromptNodeType = TypeVar("_InlinePromptNodeType", bound=InlinePromptNode)
  class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType], Generic[_InlinePromptNodeType]):
  output_id: ClassVar[Optional[UUID]] = None
  array_output_id: ClassVar[Optional[UUID]] = None
+ json_output_id: ClassVar[Optional[UUID]] = None
  prompt_input_ids_by_name: ClassVar[Dict[str, UUID]] = {}

  def serialize(
vellum_ee/workflows/display/workflows/tests/test_workflow_display.py CHANGED
@@ -5,9 +5,11 @@ from vellum.workflows.nodes.bases.base import BaseNode
  from vellum.workflows.nodes.core.inline_subworkflow_node.node import InlineSubworkflowNode
  from vellum.workflows.nodes.core.retry_node.node import RetryNode
  from vellum.workflows.nodes.core.templating_node.node import TemplatingNode
+ from vellum.workflows.nodes.core.try_node.node import TryNode
  from vellum.workflows.workflows.base import BaseWorkflow
  from vellum_ee.workflows.display.nodes import BaseNodeDisplay
  from vellum_ee.workflows.display.nodes.vellum.retry_node import BaseRetryNodeDisplay
+ from vellum_ee.workflows.display.nodes.vellum.try_node import BaseTryNodeDisplay
  from vellum_ee.workflows.display.vellum import NodeDisplayData, NodeDisplayPosition
  from vellum_ee.workflows.display.workflows import VellumWorkflowDisplay
  from vellum_ee.workflows.display.workflows.get_vellum_workflow_display_class import get_workflow_display
@@ -119,6 +121,29 @@ def test_get_event_display_context__node_display_filled_without_base_display():
  assert StartNode.__output_ids__ == node_event_display.output_display


+ def test_get_event_display_context__node_display_filled_without_output_display():
+ # GIVEN a simple workflow
+ class StartNode(BaseNode):
+ class Outputs(BaseNode.Outputs):
+ foo: str
+
+ class MyWorkflow(BaseWorkflow):
+ graph = StartNode
+
+ class StartNodeDisplay(BaseNodeDisplay[StartNode]):
+ pass
+
+ # WHEN we gather the event display context
+ display_context = VellumWorkflowDisplay(MyWorkflow).get_event_display_context()
+
+ # THEN the node display should be included
+ assert str(StartNode.__id__) in display_context.node_displays
+ node_event_display = display_context.node_displays[str(StartNode.__id__)]
+
+ # AND so should their output ids
+ assert node_event_display.output_display.keys() == {"foo"}
+
+
  def test_get_event_display_context__node_display_to_include_subworkflow_display():
  # GIVEN a simple workflow
  class InnerNode(BaseNode):
@@ -145,19 +170,33 @@ def test_get_event_display_context__node_display_to_include_subworkflow_display(
  assert str(InnerNode.__id__) in node_event_display.subworkflow_display.node_displays


- def test_get_event_display_context__node_display_for_adornment_nodes():
- # GIVEN a simple workflow with a retry node adornment
- @RetryNode.wrap(max_attempts=4)
+ @pytest.mark.parametrize(
+ ["AdornmentNode", "AdornmentNodeDisplay", "expected_adornment_output_names"],
+ [
+ [RetryNode, BaseRetryNodeDisplay, {"foo"}],
+ [TryNode, BaseTryNodeDisplay, {"foo", "error"}],
+ ],
+ ids=["retry_node", "try_node"],
+ )
+ def test_get_event_display_context__node_display_for_adornment_nodes(
+ AdornmentNode,
+ AdornmentNodeDisplay,
+ expected_adornment_output_names,
+ ):
+ # GIVEN a simple workflow with an adornment
+ @AdornmentNode.wrap()
  class MyNode(BaseNode):
- pass
+ class Outputs(BaseNode.Outputs):
+ foo: str

  class MyWorkflow(BaseWorkflow):
  graph = MyNode

  # AND a display class for the node
+ adornment_node_id = uuid4()
  inner_node_id = uuid4()

- @BaseRetryNodeDisplay.wrap()
+ @AdornmentNodeDisplay.wrap(node_id=adornment_node_id)
  class MyNodeDisplay(BaseNodeDisplay[MyNode]):
  node_id = inner_node_id

@@ -165,11 +204,16 @@ def test_get_event_display_context__node_display_for_adornment_nodes():
  display_context = VellumWorkflowDisplay(MyWorkflow).get_event_display_context()

  # THEN the subworkflow display should be included
- assert str(MyNode.__id__) in display_context.node_displays
- node_event_display = display_context.node_displays[str(MyNode.__id__)]
+ assert str(adornment_node_id) in display_context.node_displays
+ node_event_display = display_context.node_displays[str(adornment_node_id)]
  assert node_event_display.subworkflow_display is not None
  assert str(inner_node_id) in node_event_display.subworkflow_display.node_displays

+ # AND the inner node should have the correct outputs
+ inner_node_display = node_event_display.subworkflow_display.node_displays[str(inner_node_id)]
+ assert inner_node_display.output_display.keys() == {"foo"}
+ assert node_event_display.output_display.keys() == expected_adornment_output_names
+

  def test_get_event_display_context__templating_node_input_display():
  # GIVEN a simple workflow with a templating node referencing another node output