vellum-ai 1.3.10__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (27)
  1. vellum/client/core/client_wrapper.py +2 -2
  2. vellum/client/tests/__init__.py +0 -0
  3. vellum/client/tests/test_utils.py +34 -0
  4. vellum/client/types/variable_prompt_block.py +7 -0
  5. vellum/client/utils.py +24 -0
  6. vellum/workflows/__init__.py +2 -0
  7. vellum/workflows/errors/types.py +1 -0
  8. vellum/workflows/inputs/__init__.py +2 -0
  9. vellum/workflows/inputs/dataset_row.py +38 -0
  10. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +13 -1
  11. vellum/workflows/nodes/displayable/tool_calling_node/node.py +2 -3
  12. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +2 -37
  13. vellum/workflows/nodes/tests/test_utils.py +4 -4
  14. vellum/workflows/nodes/utils.py +4 -4
  15. vellum/workflows/sandbox.py +6 -2
  16. vellum/workflows/tests/test_dataset_row.py +99 -0
  17. vellum/workflows/types/definition.py +25 -9
  18. vellum/workflows/utils/functions.py +40 -1
  19. {vellum_ai-1.3.10.dist-info → vellum_ai-1.4.0.dist-info}/METADATA +1 -1
  20. {vellum_ai-1.3.10.dist-info → vellum_ai-1.4.0.dist-info}/RECORD +27 -22
  21. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py +1 -1
  22. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py +1 -1
  23. vellum_ee/workflows/display/workflows/base_workflow_display.py +5 -1
  24. vellum_ee/workflows/tests/test_serialize_module.py +5 -6
  25. {vellum_ai-1.3.10.dist-info → vellum_ai-1.4.0.dist-info}/LICENSE +0 -0
  26. {vellum_ai-1.3.10.dist-info → vellum_ai-1.4.0.dist-info}/WHEEL +0 -0
  27. {vellum_ai-1.3.10.dist-info → vellum_ai-1.4.0.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py
@@ -27,10 +27,10 @@ class BaseClientWrapper:
 
  def get_headers(self) -> typing.Dict[str, str]:
  headers: typing.Dict[str, str] = {
- "User-Agent": "vellum-ai/1.3.10",
+ "User-Agent": "vellum-ai/1.4.0",
  "X-Fern-Language": "Python",
  "X-Fern-SDK-Name": "vellum-ai",
- "X-Fern-SDK-Version": "1.3.10",
+ "X-Fern-SDK-Version": "1.4.0",
  **(self.get_custom_headers() or {}),
  }
  if self._api_version is not None:
vellum/client/tests/__init__.py ADDED (empty file, no content to diff)
vellum/client/tests/test_utils.py ADDED
@@ -0,0 +1,34 @@
+ import uuid
+ from unittest.mock import Mock
+ from vellum.client.utils import convert_input_variable_to_uuid
+
+
+ def test_convert_input_variable_to_uuid_with_valid_uuid():
+ """Test convert_input_variable_to_uuid with valid UUID passes through unchanged."""
+ uuid_value = "123e4567-e89b-12d3-a456-426614174000"
+
+ # GIVEN a valid UUID
+ # WHEN the function is called
+ result = convert_input_variable_to_uuid(uuid_value)
+
+ # THEN the result should be the original UUID unchanged
+ assert result == uuid_value
+
+
+ def test_convert_input_variable_to_uuid_with_invalid_uuid_converts():
+ """Test convert_input_variable_to_uuid with invalid UUID gets converted to UUID."""
+ non_uuid_value = "some_variable_name"
+ executable_id = "test_executable_123"
+
+ # GIVEN an invalid UUID with context
+ mock_info = Mock()
+ mock_info.context = {"executable_id": executable_id}
+
+ # WHEN the function is called
+ result = convert_input_variable_to_uuid(non_uuid_value, mock_info)
+
+ # THEN it should return a different value
+ assert result != non_uuid_value
+
+ # AND it's a valid UUID
+ uuid.UUID(result) # This will raise ValueError if not a valid UUID
vellum/client/types/variable_prompt_block.py
@@ -3,6 +3,7 @@
  import typing
 
  import pydantic
+ from vellum.client.utils import convert_input_variable_to_uuid
  from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
  from .ephemeral_prompt_cache_config import EphemeralPromptCacheConfig
  from .prompt_block_state import PromptBlockState
@@ -26,3 +27,9 @@ class VariablePromptBlock(UniversalBaseModel):
  frozen = True
  smart_union = True
  extra = pydantic.Extra.allow
+
+
+ @pydantic.field_serializer("input_variable")
+ def serialize_input_variable(self, value: str, info: pydantic.SerializationInfo) -> str:
+ """Convert input_variable to UUID using executable_id from context."""
+ return convert_input_variable_to_uuid(value, info)
vellum/client/utils.py ADDED
@@ -0,0 +1,24 @@
+ # Custom client utilities
+ # This file is not generated by Fern and can be freely edited
+
+ from uuid import UUID
+ from typing import Optional
+ from pydantic import SerializationInfo
+
+
+ def convert_input_variable_to_uuid(value: str, info: Optional[SerializationInfo] = None) -> str:
+ """Convert input_variable to UUID using executable_id from context."""
+ # Check if value is already a valid UUID
+ try:
+ UUID(value)
+ return value # Already a UUID, return as-is
+ except ValueError:
+ pass # Not a UUID, continue with conversion logic
+
+ if info and hasattr(info, "context") and info.context:
+ executable_id = info.context.get("executable_id")
+ if executable_id:
+ from vellum.workflows.utils.uuids import uuid4_from_hash # avoid circular import
+
+ return str(uuid4_from_hash(f"{executable_id}|{value}"))
+ return value
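A minimal usage sketch of the new helper (not part of the package diff); the executable_id below is an arbitrary placeholder. Values that already parse as UUIDs pass through, named variables without context pass through, and named variables with an executable_id in the context are mapped via uuid4_from_hash, which (as the name suggests) should yield the same UUID for the same inputs:

from unittest.mock import Mock

from vellum.client.utils import convert_input_variable_to_uuid

# Already a valid UUID: returned unchanged.
assert convert_input_variable_to_uuid("123e4567-e89b-12d3-a456-426614174000") == "123e4567-e89b-12d3-a456-426614174000"

# No serialization context: the variable name is returned as-is.
assert convert_input_variable_to_uuid("question") == "question"

# With an executable_id in the context, the name is hashed as "<executable_id>|<name>".
info = Mock(context={"executable_id": "my-executable"})  # placeholder id
first = convert_input_variable_to_uuid("question", info)
assert first == convert_input_variable_to_uuid("question", info)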
vellum/workflows/__init__.py
@@ -1,5 +1,7 @@
+ from .inputs.dataset_row import DatasetRow
  from .workflows import BaseWorkflow
 
  __all__ = [
  "BaseWorkflow",
+ "DatasetRow",
  ]
vellum/workflows/errors/types.py
@@ -40,6 +40,7 @@ _VELLUM_ERROR_CODE_TO_WORKFLOW_ERROR_CODE: Dict[VellumErrorCodeEnum, WorkflowErr
  "INVALID_REQUEST": WorkflowErrorCode.INVALID_INPUTS,
  "INVALID_INPUTS": WorkflowErrorCode.INVALID_INPUTS,
  "PROVIDER_ERROR": WorkflowErrorCode.PROVIDER_ERROR,
+ "PROVIDER_CREDENTIALS_UNAVAILABLE": WorkflowErrorCode.PROVIDER_CREDENTIALS_UNAVAILABLE,
  "REQUEST_TIMEOUT": WorkflowErrorCode.PROVIDER_ERROR,
  "INTERNAL_SERVER_ERROR": WorkflowErrorCode.INTERNAL_ERROR,
  "USER_DEFINED_ERROR": WorkflowErrorCode.USER_DEFINED_ERROR,
vellum/workflows/inputs/__init__.py
@@ -1,5 +1,7 @@
  from .base import BaseInputs
+ from .dataset_row import DatasetRow
 
  __all__ = [
  "BaseInputs",
+ "DatasetRow",
  ]
vellum/workflows/inputs/dataset_row.py ADDED
@@ -0,0 +1,38 @@
+ from typing import Any, Dict
+
+ from pydantic import field_serializer
+
+ from vellum.client.core.pydantic_utilities import UniversalBaseModel
+ from vellum.workflows.inputs.base import BaseInputs
+
+
+ class DatasetRow(UniversalBaseModel):
+ """
+ Universal base model representing a dataset row with a label and inputs.
+
+ Attributes:
+ label: String label for the dataset row
+ inputs: BaseInputs instance containing the input data
+ """
+
+ label: str
+ inputs: BaseInputs
+
+ @field_serializer("inputs")
+ def serialize_inputs(self, inputs: BaseInputs) -> Dict[str, Any]:
+ """
+ Custom serializer for BaseInputs that converts it to a dictionary.
+
+ Args:
+ inputs: BaseInputs instance to serialize
+
+ Returns:
+ Dictionary representation of the inputs
+ """
+ result = {}
+
+ for input_descriptor, value in inputs:
+ if not input_descriptor.name.startswith("__"):
+ result[input_descriptor.name] = value
+
+ return result
vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py
@@ -45,12 +45,14 @@ from vellum.workflows.nodes.displayable.bases.base_prompt_node import BasePrompt
  from vellum.workflows.nodes.displayable.bases.utils import process_additional_prompt_outputs
  from vellum.workflows.outputs import BaseOutput
  from vellum.workflows.types import MergeBehavior
- from vellum.workflows.types.definition import DeploymentDefinition
+ from vellum.workflows.types.definition import DeploymentDefinition, MCPServer
  from vellum.workflows.types.generics import StateType, is_workflow_class
  from vellum.workflows.utils.functions import (
  compile_function_definition,
  compile_inline_workflow_function_definition,
+ compile_mcp_tool_definition,
  compile_workflow_deployment_function_definition,
+ get_mcp_tool_name,
  )
  from vellum.workflows.utils.pydantic_schema import normalize_json
 
@@ -140,6 +142,16 @@ class BaseInlinePromptNode(BasePromptNode[StateType], Generic[StateType]):
  normalized_functions.append(compile_inline_workflow_function_definition(function))
  elif callable(function):
  normalized_functions.append(compile_function_definition(function))
+ elif isinstance(function, MCPServer):
+ tool_definitions = compile_mcp_tool_definition(function)
+ for tool_def in tool_definitions:
+ normalized_functions.append(
+ FunctionDefinition(
+ name=get_mcp_tool_name(tool_def),
+ description=tool_def.description,
+ parameters=tool_def.parameters,
+ )
+ )
  else:
  raise NodeException(
  message=f"`{function}` is not a valid function definition",
vellum/workflows/nodes/displayable/tool_calling_node/node.py
@@ -18,14 +18,13 @@ from vellum.workflows.nodes.displayable.tool_calling_node.utils import (
  create_router_node,
  create_tool_prompt_node,
  get_function_name,
- get_mcp_tool_name,
- hydrate_mcp_tool_definitions,
  )
  from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
  from vellum.workflows.state.context import WorkflowContext
  from vellum.workflows.types.core import EntityInputsInterface, Tool
  from vellum.workflows.types.definition import MCPServer
  from vellum.workflows.types.generics import StateType
+ from vellum.workflows.utils.functions import compile_mcp_tool_definition, get_mcp_tool_name
  from vellum.workflows.workflows.event_filters import all_workflow_event_filter
 
 
@@ -162,7 +161,7 @@ class ToolCallingNode(BaseNode[StateType], Generic[StateType]):
  self._function_nodes = {}
  for function in self.functions:
  if isinstance(function, MCPServer):
- tool_definitions = hydrate_mcp_tool_definitions(function)
+ tool_definitions = compile_mcp_tool_definition(function)
  for tool_definition in tool_definitions:
  function_name = get_mcp_tool_name(tool_definition)
 
vellum/workflows/nodes/displayable/tool_calling_node/utils.py
@@ -33,6 +33,7 @@ from vellum.workflows.state.encoder import DefaultStateEncoder
  from vellum.workflows.types.core import EntityInputsInterface, MergeBehavior, Tool, ToolBase
  from vellum.workflows.types.definition import ComposioToolDefinition, DeploymentDefinition, MCPServer, MCPToolDefinition
  from vellum.workflows.types.generics import is_workflow_class
+ from vellum.workflows.utils.functions import compile_mcp_tool_definition, get_mcp_tool_name
 
  CHAT_HISTORY_VARIABLE = "chat_history"
 
@@ -303,26 +304,6 @@ def _hydrate_composio_tool_definition(tool_def: ComposioToolDefinition) -> Funct
  )
 
 
- def hydrate_mcp_tool_definitions(server_def: MCPServer) -> List[MCPToolDefinition]:
- """Hydrate an MCPToolDefinition with detailed information from the MCP server.
-
- We do tool discovery on the MCP server to get the tool definitions.
-
- Args:
- tool_def: The basic MCPToolDefinition to enhance
-
- Returns:
- MCPToolDefinition with detailed parameters and description
- """
- try:
- mcp_service = MCPService()
- return mcp_service.hydrate_tool_definitions(server_def)
- except Exception as e:
- # If hydration fails, log and return original
- logger.warning(f"Failed to enhance MCP server '{server_def.name}': {e}")
- return []
-
-
  def create_tool_prompt_node(
  ml_model: str,
  blocks: List[Union[PromptBlock, Dict[str, Any]]],
@@ -341,17 +322,6 @@ def create_tool_prompt_node(
  # Get Composio tool details and hydrate the function definition
  enhanced_function = _hydrate_composio_tool_definition(function)
  prompt_functions.append(enhanced_function)
- elif isinstance(function, MCPServer):
- tool_functions: List[MCPToolDefinition] = hydrate_mcp_tool_definitions(function)
- for tool_function in tool_functions:
- name = get_mcp_tool_name(tool_function)
- prompt_functions.append(
- FunctionDefinition(
- name=name,
- description=tool_function.description,
- parameters=tool_function.parameters,
- )
- )
  else:
  prompt_functions.append(function)
  else:
@@ -440,7 +410,7 @@ def create_router_node(
  port = create_port_condition(function_name)
  setattr(Ports, function_name, port)
  elif isinstance(function, MCPServer):
- tool_functions: List[MCPToolDefinition] = hydrate_mcp_tool_definitions(function)
+ tool_functions: List[MCPToolDefinition] = compile_mcp_tool_definition(function)
  for tool_function in tool_functions:
  name = get_mcp_tool_name(tool_function)
  port = create_port_condition(name)
@@ -604,8 +574,3 @@ def get_function_name(function: ToolBase) -> str:
  return function.name # type: ignore[return-value]
  else:
  return snake_case(function.__name__)
-
-
- def get_mcp_tool_name(tool_def: MCPToolDefinition) -> str:
- server_name = snake_case(tool_def.server.name)
- return f"{server_name}__{tool_def.name}"
vellum/workflows/nodes/tests/test_utils.py
@@ -86,23 +86,23 @@ def test_parse_type_from_str_list_of_models():
  List[str],
  NodeException,
  WorkflowErrorCode.INVALID_OUTPUTS,
- "Invalid JSON Array format for result_as_str",
+ "Invalid JSON format: expected a valid JSON array",
  ),
  (
  "{invalid json}",
  Person,
  NodeException,
  WorkflowErrorCode.INVALID_OUTPUTS,
- "Invalid JSON format for result_as_str",
+ "Invalid JSON format: unable to parse the provided data",
  ),
  (
  "{invalid json}",
  Json,
  NodeException,
  WorkflowErrorCode.INVALID_OUTPUTS,
- "Invalid JSON format for result_as_str",
+ "Invalid JSON format: unable to parse the provided data",
  ),
- ('{"name": "Alice"}', List[str], ValueError, None, "Expected a list of items for result_as_str, received dict"),
+ ('{"name": "Alice"}', List[str], ValueError, None, "Expected a list of items, but received dict"),
  ("data", object, ValueError, None, "Unsupported output type: <class 'object'>"),
  ],
  ids=[
vellum/workflows/nodes/utils.py
@@ -129,11 +129,11 @@ def parse_type_from_str(result_as_str: str, output_type: Any) -> Any:
  # raise ValueError("Invalid JSON Array format for result_as_str")
  raise NodeException(
  code=WorkflowErrorCode.INVALID_OUTPUTS,
- message="Invalid JSON Array format for result_as_str",
+ message="Invalid JSON format: expected a valid JSON array",
  )
 
  if not isinstance(data, list):
- raise ValueError(f"Expected a list of items for result_as_str, received {data.__class__.__name__}")
+ raise ValueError(f"Expected a list of items, but received {data.__class__.__name__}")
 
  inner_type = get_args(output_type)[0]
  if issubclass(inner_type, BaseModel):
@@ -151,7 +151,7 @@ def parse_type_from_str(result_as_str: str, output_type: Any) -> Any:
  except json.JSONDecodeError:
  raise NodeException(
  code=WorkflowErrorCode.INVALID_OUTPUTS,
- message="Invalid JSON format for result_as_str",
+ message="Invalid JSON format: unable to parse the provided data",
  )
 
  if get_origin(output_type) is Union:
@@ -177,7 +177,7 @@ def parse_type_from_str(result_as_str: str, output_type: Any) -> Any:
  except json.JSONDecodeError:
  raise NodeException(
  code=WorkflowErrorCode.INVALID_OUTPUTS,
- message="Invalid JSON format for result_as_str",
+ message="Invalid JSON format: unable to parse the provided data",
  )
 
  raise ValueError(f"Unsupported output type: {output_type}")
vellum/workflows/sandbox.py
@@ -1,9 +1,10 @@
- from typing import Generic, Optional, Sequence
+ from typing import Generic, Optional, Sequence, Union
 
  import dotenv
 
  from vellum.workflows.events.workflow import WorkflowEventStream
  from vellum.workflows.inputs.base import BaseInputs
+ from vellum.workflows.inputs.dataset_row import DatasetRow
  from vellum.workflows.logging import load_logger
  from vellum.workflows.types.generics import WorkflowType
  from vellum.workflows.workflows.event_filters import root_workflow_event_filter
@@ -14,7 +15,7 @@ class WorkflowSandboxRunner(Generic[WorkflowType]):
  self,
  workflow: WorkflowType,
  inputs: Optional[Sequence[BaseInputs]] = None, # DEPRECATED - remove in v2.0.0
- dataset: Optional[Sequence[BaseInputs]] = None,
+ dataset: Optional[Sequence[Union[BaseInputs, DatasetRow]]] = None,
  ):
  dotenv.load_dotenv()
  self._logger = load_logger()
@@ -50,6 +51,9 @@ class WorkflowSandboxRunner(Generic[WorkflowType]):
 
  selected_inputs = self._inputs[index]
 
+ if isinstance(selected_inputs, DatasetRow):
+ selected_inputs = selected_inputs.inputs
+
  events = self._workflow.stream(
  inputs=selected_inputs,
  event_filter=root_workflow_event_filter,
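A hedged sketch of the widened dataset parameter (not part of the package diff): the runner now accepts labeled DatasetRow entries alongside plain BaseInputs and unwraps DatasetRow.inputs before streaming. MyWorkflow is a placeholder for any user-defined BaseWorkflow subclass, and run() is assumed to be the runner's existing entry point:

from vellum.workflows.inputs import BaseInputs, DatasetRow
from vellum.workflows.sandbox import WorkflowSandboxRunner


class Inputs(BaseInputs):
    message: str


runner = WorkflowSandboxRunner(
    workflow=MyWorkflow(),  # placeholder: any BaseWorkflow subclass
    dataset=[
        Inputs(message="World"),  # plain BaseInputs, still supported
        DatasetRow(label="Custom Test", inputs=Inputs(message="DatasetRow Test")),  # labeled row
    ],
)
runner.run()  # assumed entry point; DatasetRow entries are reduced to their .inputs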
vellum/workflows/tests/test_dataset_row.py ADDED
@@ -0,0 +1,99 @@
+ from typing import Optional
+
+ from vellum.client.types.chat_message import ChatMessage
+ from vellum.workflows.inputs.base import BaseInputs
+ from vellum.workflows.inputs.dataset_row import DatasetRow
+
+
+ def test_dataset_row_serialization():
+ """
+ Test that DatasetRow can be properly serialized to JSON and back.
+ """
+
+ class TestInputs(BaseInputs):
+ message: str
+ count: int
+ optional_field: Optional[str] = None
+ chat_history: list[ChatMessage]
+
+ test_inputs = TestInputs(
+ message="Hello World",
+ count=42,
+ optional_field="test",
+ chat_history=[ChatMessage(text="Hello", role="USER"), ChatMessage(text="Hi there!", role="ASSISTANT")],
+ )
+ dataset_row = DatasetRow(label="test_label", inputs=test_inputs)
+
+ serialized_dict = dataset_row.model_dump()
+
+ assert "label" in serialized_dict
+ assert "inputs" in serialized_dict
+ assert serialized_dict["label"] == "test_label"
+
+ inputs_data = serialized_dict["inputs"]
+ assert inputs_data["message"] == "Hello World"
+ assert inputs_data["count"] == 42
+ assert inputs_data["optional_field"] == "test"
+ assert "chat_history" in inputs_data
+ assert len(inputs_data["chat_history"]) == 2
+ assert inputs_data["chat_history"][0]["text"] == "Hello"
+ assert inputs_data["chat_history"][0]["role"] == "USER"
+ assert inputs_data["chat_history"][1]["text"] == "Hi there!"
+ assert inputs_data["chat_history"][1]["role"] == "ASSISTANT"
+
+
+ def test_dataset_row_dict_serialization():
+ """
+ Test that DatasetRow can be properly converted to dict.
+ """
+
+ class SimpleInputs(BaseInputs):
+ text: str
+
+ simple_inputs = SimpleInputs(text="sample text")
+ dataset_row = DatasetRow(label="simple_label", inputs=simple_inputs)
+
+ result_dict = dataset_row.model_dump()
+
+ assert result_dict["label"] == "simple_label"
+ assert result_dict["inputs"]["text"] == "sample text"
+
+
+ def test_dataset_row_with_multiple_fields():
+ """
+ Test that DatasetRow works with BaseInputs that have multiple fields.
+ """
+
+ class MultiFieldInputs(BaseInputs):
+ text_field: str
+ number_field: int
+ optional_field: Optional[str] = None
+
+ multi_inputs = MultiFieldInputs(text_field="test_text", number_field=456, optional_field="optional_value")
+ dataset_row = DatasetRow(label="multi_field_test", inputs=multi_inputs)
+
+ result_dict = dataset_row.model_dump()
+
+ assert result_dict["label"] == "multi_field_test"
+ assert result_dict["inputs"]["text_field"] == "test_text"
+ assert result_dict["inputs"]["number_field"] == 456
+ assert result_dict["inputs"]["optional_field"] == "optional_value"
+
+
+ def test_dataset_row_with_default_inputs():
+ """
+ Test that DatasetRow works with BaseInputs that have default values.
+ """
+
+ class InputsWithDefaults(BaseInputs):
+ required_field: str
+ optional_with_default: str = "default_value"
+
+ inputs_with_defaults = InputsWithDefaults(required_field="required_value")
+ dataset_row = DatasetRow(label="defaults_test", inputs=inputs_with_defaults)
+
+ serialized_dict = dataset_row.model_dump()
+
+ assert serialized_dict["label"] == "defaults_test"
+ assert serialized_dict["inputs"]["required_field"] == "required_value"
+ assert serialized_dict["inputs"]["optional_with_default"] == "default_value"
vellum/workflows/types/definition.py
@@ -2,13 +2,14 @@ import importlib
  import inspect
  from types import FrameType
  from uuid import UUID
- from typing import Annotated, Any, Dict, Literal, Optional, Union
+ from typing import Annotated, Any, Dict, List, Literal, Optional, Union
 
  from pydantic import BeforeValidator, SerializationInfo, model_serializer
 
  from vellum import Vellum
  from vellum.client.core.pydantic_utilities import UniversalBaseModel
  from vellum.client.types.code_resource_definition import CodeResourceDefinition as ClientCodeResourceDefinition
+ from vellum.client.types.vellum_variable import VellumVariable
  from vellum.workflows.constants import AuthorizationType
  from vellum.workflows.references.environment_variable import EnvironmentVariableReference
 
@@ -82,6 +83,7 @@ class DeploymentDefinition(UniversalBaseModel):
  # hydrated fields
  name: Optional[str] = None
  description: Optional[str] = None
+ input_variables: Optional[List[VellumVariable]] = None
 
  def _is_uuid(self) -> bool:
  """Check if the deployment field is a valid UUID."""
@@ -105,6 +107,25 @@
  return self.deployment
  return None
 
+ def get_release_info(self, client: Vellum):
+ try:
+ release = client.workflow_deployments.retrieve_workflow_deployment_release(
+ self.deployment, self.release_tag
+ )
+ except Exception:
+ # If we fail to get the release info, we'll use the deployment name and description
+ return {
+ "name": self.deployment,
+ "description": f"Workflow Deployment for {self.deployment}",
+ "input_variables": [],
+ }
+
+ return {
+ "name": release.deployment.name,
+ "description": release.description or f"Workflow Deployment for {self.deployment}",
+ "input_variables": release.workflow_version.input_variables,
+ }
+
  @model_serializer(mode="wrap")
  def _serialize(self, handler, info: SerializationInfo):
  """Allow Pydantic to serialize directly given a `client` in context.
@@ -115,16 +136,11 @@ class DeploymentDefinition(UniversalBaseModel):
  client: Optional[Vellum] = context.get("client") if context else None
 
  if client:
- release = client.workflow_deployments.retrieve_workflow_deployment_release(
- self.deployment, self.release_tag
- )
- self.name = release.deployment.name or self.deployment
- self.description = release.description or f"Workflow Deployment for {self.deployment}"
-
+ release_info = self.get_release_info(client)
  return {
  "type": "WORKFLOW_DEPLOYMENT",
- "name": release_info["name"],
- "description": self.description,
+ "name": release_info["name"],
+ "description": release_info["description"],
  "deployment": self.deployment,
  "release_tag": self.release_tag,
  }
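A hedged sketch of the new get_release_info behavior (not part of the package diff); the API key, deployment name, and release tag are placeholder assumptions:

from vellum import Vellum
from vellum.workflows.types.definition import DeploymentDefinition

client = Vellum(api_key="<YOUR_API_KEY>")  # placeholder credentials
definition = DeploymentDefinition(deployment="my-deployment", release_tag="LATEST")  # assumed values

info = definition.get_release_info(client)
# On success: the release's name, description, and workflow_version.input_variables.
# On any API error it degrades to:
#   {"name": "my-deployment",
#    "description": "Workflow Deployment for my-deployment",
#    "input_variables": []}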
vellum/workflows/utils/functions.py
@@ -1,6 +1,19 @@
  import dataclasses
  import inspect
- from typing import TYPE_CHECKING, Annotated, Any, Callable, Dict, Literal, Optional, Type, Union, get_args, get_origin
+ from typing import (
+ TYPE_CHECKING,
+ Annotated,
+ Any,
+ Callable,
+ Dict,
+ List,
+ Literal,
+ Optional,
+ Type,
+ Union,
+ get_args,
+ get_origin,
+ )
 
  from pydantic import BaseModel
  from pydantic_core import PydanticUndefined
@@ -8,6 +21,8 @@ from pydash import snake_case
 
  from vellum import Vellum
  from vellum.client.types.function_definition import FunctionDefinition
+ from vellum.workflows.integrations.mcp_service import MCPService
+ from vellum.workflows.types.definition import MCPServer, MCPToolDefinition
  from vellum.workflows.utils.vellum_variables import vellum_variable_type_to_openapi_type
 
  if TYPE_CHECKING:
@@ -261,6 +276,30 @@ def compile_workflow_deployment_function_definition(
  )
 
 
+ def get_mcp_tool_name(tool_def: MCPToolDefinition) -> str:
+ """Generate a unique name for an MCP tool by combining server and tool names."""
+ server_name = snake_case(tool_def.server.name)
+ return f"{server_name}__{tool_def.name}"
+
+
+ def compile_mcp_tool_definition(server_def: MCPServer) -> List[MCPToolDefinition]:
+ """Hydrate an MCPToolDefinition with detailed information from the MCP server.
+
+ We do tool discovery on the MCP server to get the tool definitions.
+
+ Args:
+ tool_def: The basic MCPToolDefinition to enhance
+
+ Returns:
+ MCPToolDefinition with detailed parameters and description
+ """
+ try:
+ mcp_service = MCPService()
+ return mcp_service.hydrate_tool_definitions(server_def)
+ except Exception:
+ return []
+
+
  def use_tool_inputs(**inputs):
  """
  Decorator to specify which parameters of a tool function should be provided
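A hedged sketch of the relocated MCP helpers (not part of the package diff); discovery needs a reachable server, and any MCPServer field beyond name shown here is an assumption:

from vellum.workflows.types.definition import MCPServer
from vellum.workflows.utils.functions import compile_mcp_tool_definition, get_mcp_tool_name

server = MCPServer(name="GitHub Server", url="https://example.com/mcp")  # url is an assumed field

# Tool discovery; returns [] if the server cannot be reached (errors are swallowed).
for tool_def in compile_mcp_tool_definition(server):
    # Combined name is "<snake_cased server name>__<tool name>", e.g. "github_server__create_issue".
    print(get_mcp_tool_name(tool_def), tool_def.description)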
{vellum_ai-1.3.10.dist-info → vellum_ai-1.4.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 1.3.10
+ Version: 1.4.0
  Summary:
  License: MIT
  Requires-Python: >=3.9,<4.0
{vellum_ai-1.3.10.dist-info → vellum_ai-1.4.0.dist-info}/RECORD
@@ -98,10 +98,10 @@ vellum_ee/workflows/display/tests/workflow_serialization/test_basic_subworkflow_
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_templating_node_serialization.py,sha256=ddPa8gNBYH2tWk92ymngY7M8n74J-8CEre50HISP_-g,7877
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_terminal_node_serialization.py,sha256=A7Ef8P1-Nyvsb97bumKT9W2R1LuZaY9IKFV-7iRueog,4010
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py,sha256=oVXCjkU0G56QJmqnd_xIwF3D9bhJwALFibM2wmRhwUk,3739
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py,sha256=1GiF3XgJ85vE05_nZFo66snNuqTt0p1c22gNI9c_BXY,26442
+ vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py,sha256=UD1HU8aEAXSo43JGhjBGE2FGsknHj_zOsIHTo2TLEW0,26470
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_mcp_serialization.py,sha256=QhQbijeCnFeX1i3SMjHJg2WVAEt5JEO3dhFRv-mofdA,2458
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_parent_input.py,sha256=__LX4cuzbyZp_1wc-SI8X_J0tnhOkCEmRVUWLKI5aQM,4578
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py,sha256=VB5c6U0GRUxPm9FbzgKYIpoZfoIU9szAWUQJ5L-4Lug,10187
+ vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py,sha256=tyRlb6ozcsvkeHOC9dsHc4jg3HrHQITDGadi8qbNGns,10215
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_workflow_deployment_serialization.py,sha256=XIZZr5POo2NLn2uEWm9EC3rejeBMoO4X-JtzTH6mvp4,4074
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_try_node_serialization.py,sha256=pLCyMScV88DTBXRH7jXaXOEA1GBq8NIipCUFwIAWnwI,2771
  vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_node_serialization.py,sha256=exT7U-axwtYgFylagScflSQLJEND51qIAx2UATju6JM,6023
@@ -121,7 +121,7 @@ vellum_ee/workflows/display/utils/tests/test_events.py,sha256=42IEBnMbaQrH8gigw5
  vellum_ee/workflows/display/utils/vellum.py,sha256=sZwU0KdmZZTKWW62SyxJTl2tC8tN6p_BpZ-lDoinV-U,5670
  vellum_ee/workflows/display/vellum.py,sha256=J2mdJZ1sdLW535DDUkq_Vm8Z572vhuxHxVZF9deKSdk,391
  vellum_ee/workflows/display/workflows/__init__.py,sha256=JTB9ObEV3l4gGGdtfBHwVJtTTKC22uj-a-XjTVwXCyA,148
- vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=4aC042Fmy9h-QUD6Z6inLbrR-7wE0t_V3NMb44Y0wek,43692
+ vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=tlhtdNFT4_NJlBDkcn13CQOA49iy6d1F7kl5ONq21Rw,44032
  vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py,sha256=gxz76AeCqgAZ9D2lZeTiZzxY9eMgn3qOSfVgiqYcOh8,2028
  vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=OKf_WVoPkYPrielOz8CyI5AjWt9MS2nSbWQKpF7HSLI,37847
  vellum_ee/workflows/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -147,7 +147,7 @@ vellum_ee/workflows/tests/local_workflow/nodes/templating_node.py,sha256=NQwFN61
  vellum_ee/workflows/tests/local_workflow/workflow.py,sha256=A4qOzOPNwePYxWbcAgIPLsmrVS_aVEZEc-wULSv787Q,393
  vellum_ee/workflows/tests/test_display_meta.py,sha256=PkXJVnMZs9GNooDkd59n4YTBAX3XGPQWeSSVbhehVFM,5112
  vellum_ee/workflows/tests/test_registry.py,sha256=B8xRIuEyLWfSqrYoPldNQXhKPfe50PllvtAZoI8-uPs,6066
- vellum_ee/workflows/tests/test_serialize_module.py,sha256=lk-4dVnG2HcxxywBXxDR1ieH8D9RJt4lvchoZhtQPdU,2892
+ vellum_ee/workflows/tests/test_serialize_module.py,sha256=d4ZpMd3oIxiq-sBXeSQESS6ix6-1P6rdCRFqBEReJIU,2882
  vellum_ee/workflows/tests/test_server.py,sha256=dXFBraU99Y6cKp2aBhLFXQTScSRcE9WaWjo1z9piqdU,23344
  vellum_ee/workflows/tests/test_virtual_files.py,sha256=TJEcMR0v2S8CkloXNmCHA0QW0K6pYNGaIjraJz7sFvY,2762
  vellum/__init__.py,sha256=lv4OTbPgTegugVOpRy5xJPPwMvC1Zqrg4oFMEwY1KSg,47428
@@ -155,7 +155,7 @@ vellum/client/README.md,sha256=flqu57ubZNTfpq60CdLtJC9gp4WEkyjb_n_eZ4OYf9w,6497
  vellum/client/__init__.py,sha256=T5Ht_w-Mk_9nzGqdadhQB8V20M0vYj7am06ut0A3P1o,73401
  vellum/client/core/__init__.py,sha256=lTcqUPXcx4112yLDd70RAPeqq6tu3eFMe1pKOqkW9JQ,1562
  vellum/client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
- vellum/client/core/client_wrapper.py,sha256=r-GaDOv2AZPXjGOGdRTzl4QBhv3rREaY-YqQLoi27ds,2842
+ vellum/client/core/client_wrapper.py,sha256=s31x1QZneMOjgCfxuF5_0xmRGacwTOb9n0pFobRD42Y,2840
  vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
  vellum/client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
  vellum/client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -250,6 +250,8 @@ vellum/client/resources/workspace_secrets/raw_client.py,sha256=ZfiNd1NisolmK07QP
  vellum/client/resources/workspaces/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
  vellum/client/resources/workspaces/client.py,sha256=36KYa2FDu6h65q2GscUFOJs4qKeiOA6grOYoCc0Gi3E,2936
  vellum/client/resources/workspaces/raw_client.py,sha256=M3Ewk1ZfEZ44EeTvBtBNoNKi5whwfLY-1GR07SyfDTI,3517
+ vellum/client/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/client/tests/test_utils.py,sha256=zk8z45-2xrm9sZ2hq8PTqY8MXmXtPqMqYK0VBBX0GHg,1176
  vellum/client/types/__init__.py,sha256=ZzAl8sIsRT3hkRaCO_tlR9fjgVIM7sQCxmLO_S5WrIo,71915
  vellum/client/types/ad_hoc_execute_prompt_event.py,sha256=B69EesIH6fpNsdoiJaSG9zF1Sl17FnjoTu4CBkUSoHk,608
  vellum/client/types/ad_hoc_expand_meta.py,sha256=Kajcj3dKKed5e7uZibRnE3ZonK_bB2JPM-3aLjLfUp4,1295
@@ -794,7 +796,7 @@ vellum/client/types/token_overlapping_window_chunking_request.py,sha256=ez-hqu3q
  vellum/client/types/unit_enum.py,sha256=BKWRVp2WfHtGK4D6TsolhNJHGHfExzrRHkFn8H8QkwQ,113
  vellum/client/types/upload_document_response.py,sha256=C-JP0uAraD8waVwCNRM4FRCRglmv78m532gpy2Q9_Oc,613
  vellum/client/types/upsert_test_suite_test_case_request.py,sha256=VZGW1_Bw5cvU7RDg7MhT3A9C3ftyeYnAJaDMiil1FuQ,1978
- vellum/client/types/variable_prompt_block.py,sha256=417cXRq9V33iWTDWiX941VzOwhzfHvUHYPjgUev9Kf0,910
+ vellum/client/types/variable_prompt_block.py,sha256=sGLrhpDIj7ThxYZXvE3DsMGdQn-F2uoZ_GfTn-pMKjE,1255
  vellum/client/types/vellum_audio.py,sha256=rgL5GPBKDYk0PPQX_2XkerVERBxra9OR3k_PP57ruTc,685
  vellum/client/types/vellum_audio_request.py,sha256=vtkd49BVlLrY9UF3Yk52P5sDbtdY7sY9_XeMBU_VDm0,692
  vellum/client/types/vellum_code_resource_definition.py,sha256=XdueTR342BDjevZ3ktJJI99RqRED4A5SUOyzPt2K6us,661
@@ -915,6 +917,7 @@ vellum/client/types/workflow_sandbox_parent_context.py,sha256=8FJ8vq7CVTg-OnOXlb
  vellum/client/types/workflow_stream_event.py,sha256=6dQx_D-UPfJVrIsSW6krmZKDKeP9kPojFHDgqy_58io,362
  vellum/client/types/workspace_read.py,sha256=NAVouD25ZHvldLrAvAOwL2w2f1tqZaSC05wZ--YvBCQ,661
  vellum/client/types/workspace_secret_read.py,sha256=qWabw1u5HZpuv77kAwtAQigj4CDB41csJ2wmXu5eJS8,678
+ vellum/client/utils.py,sha256=b_ajWqSrXigPbUVOGTdzy_gL0iQo9r1OMUH8quuhjuA,885
  vellum/core/__init__.py,sha256=Iph1pJ7wkjHt7Xh1vUeLgMqFCNa0GYjtu3BhG8EhV8Y,136
  vellum/core/api_error.py,sha256=GDjkxQb8k4HqTthWpqwIVE7hLVXJQmRT8sip2mzB-8I,146
  vellum/core/client_wrapper.py,sha256=d3MqLPTOfMq8-AaHg3S7BkBczhyrbdpnkhR0FtOvV8A,151
@@ -1712,7 +1715,7 @@ vellum/utils/typing.py,sha256=wx_daFqD69cYkuJTVnvNrpjhqC3uuhbnyJ9_bIwC9OU,327
  vellum/utils/uuid.py,sha256=Ch6wWRgwICxLxJCTl5iE3EdRlZj2zADR-zUMUtjcMWM,214
  vellum/version.py,sha256=jq-1PlAYxN9AXuaZqbYk9ak27SgE2lw9Ia5gx1b1gVI,76
  vellum/workflows/README.md,sha256=hZdTKBIcsTKPofK68oPkBhyt0nnRh0csqC12k4FMHHA,3597
- vellum/workflows/__init__.py,sha256=CssPsbNvN6rDhoLuqpEv7MMKGa51vE6dvAh6U31Pcio,71
+ vellum/workflows/__init__.py,sha256=gd5AiZqVTcvqelhysG0jOWYfC6pJKRAVhS7qwf0bHU4,132
  vellum/workflows/constants.py,sha256=xweiPRUSVEnGz9BJvpIWu96Gfok89QneARu4K7wj7f8,1358
  vellum/workflows/context.py,sha256=ViyIeMDhUv-MhnynLaXPlvlbYxRU45ySvYidCNSbFZU,2458
  vellum/workflows/descriptors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1728,7 +1731,7 @@ vellum/workflows/emitters/vellum_emitter.py,sha256=ECBIRA48WS5rIJd1iWUfye7B5Up7u
  vellum/workflows/environment/__init__.py,sha256=TJz0m9dwIs6YOwCTeuN0HHsU-ecyjc1OJXx4AFy83EQ,121
  vellum/workflows/environment/environment.py,sha256=Ck3RPKXJvtMGx_toqYQQQF-ZwXm5ijVwJpEPTeIJ4_Q,471
  vellum/workflows/errors/__init__.py,sha256=tWGPu5xyAU8gRb8_bl0fL7OfU3wxQ9UH6qVwy4X4P_Q,113
- vellum/workflows/errors/types.py,sha256=n89vGaCdRYKxv-JngGm9nkR5yXeWnR48CJ9L1iABLaQ,4144
+ vellum/workflows/errors/types.py,sha256=1LQzsEwCL-kqLGUxZcgJWahL-XNfIOwCz_5f_fWzLrM,4236
  vellum/workflows/events/__init__.py,sha256=V4mh766fyA70WvHelm9kfVZGrUgEKcJ9tJt8EepfQYU,832
  vellum/workflows/events/context.py,sha256=vCfMIPmz4j9Om36rRWa35A_JU_VccWWS52_mZkkqxak,3345
  vellum/workflows/events/node.py,sha256=yHVd-rX2E3qc2XLnZr0fW6uq4ZCMm34mnY2tzYceyOg,5884
@@ -1789,8 +1792,9 @@ vellum/workflows/graph/__init__.py,sha256=3sHlay5d_-uD7j3QJXiGl0WHFZZ_QScRvgyDhN
  vellum/workflows/graph/graph.py,sha256=vkpteMc2a61IFGHlrA50J4ntVj6m3agcyWrXGQEbjHc,11280
  vellum/workflows/graph/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/graph/tests/test_graph.py,sha256=0Pov0sCsxjzUDL9wy7xy9jFD-F2GsMJnZVEVFXzQGdM,15433
- vellum/workflows/inputs/__init__.py,sha256=AbFEteIYEvCb14fM3EK7bhM-40-6s494rSlIhQ4Dsss,62
+ vellum/workflows/inputs/__init__.py,sha256=02pj0IbJkN1AxTreswK39cNi45tA8GWcAAdRJve4cuM,116
  vellum/workflows/inputs/base.py,sha256=w3owT5B3rLBmIj-v-jL2l-HD4yd3hXK9RmHVd557BpA,5126
+ vellum/workflows/inputs/dataset_row.py,sha256=T8lcn9qyC7IY0w3EIfnn4AwZ3pYw9sf4kdIi0VkX_Sw,1033
  vellum/workflows/inputs/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/inputs/tests/test_inputs.py,sha256=lioA8917mFLYq7Ml69UNkqUjcWbbxkxnpIEJ4FBaYBk,2206
  vellum/workflows/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1840,7 +1844,7 @@ vellum/workflows/nodes/displayable/bases/api_node/tests/test_node.py,sha256=5C59
  vellum/workflows/nodes/displayable/bases/base_prompt_node/__init__.py,sha256=Org3xTvgp1pA0uUXFfnJr29D3HzCey2lEdYF4zbIUgo,70
  vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py,sha256=ea20icDM1HB942wkH-XtXNSNCBDcjeOiN3vowkHL4fs,4477
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/__init__.py,sha256=Hl35IAoepRpE-j4cALaXVJIYTYOF3qszyVbxTj4kS1s,82
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=TkVfwD5-GGWG32vtGMo4ZjuxWrjVYbK7tDWq0U9OBCM,17316
+ vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=eRhqxpkqcP5S4jx-vnnyHZvZAVQxKyCQ9kYxQCyWNvY,17921
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py,sha256=xc53wGwVqxBnN7eoyWkJ-RJ-FeUpHKekkKjViASHAFg,27495
  vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py,sha256=H9mM75EQpP6PUvsXCTbwjw4CqMMLf36m1G2XqiPEvH4,12139
@@ -1894,13 +1898,13 @@ vellum/workflows/nodes/displayable/tests/test_search_node_error_handling.py,sha2
  vellum/workflows/nodes/displayable/tests/test_search_node_wth_text_output.py,sha256=VepO5z1277c1y5N6LLIC31nnWD1aak2m5oPFplfJHHs,6935
  vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha256=Bjv-wZyFgNaVZb9KEMMZd9lFoLzbPEPjEMpANizMZw4,2413
  vellum/workflows/nodes/displayable/tool_calling_node/__init__.py,sha256=3n0-ysmFKsr40CVxPthc0rfJgqVJeZuUEsCmYudLVRg,117
- vellum/workflows/nodes/displayable/tool_calling_node/node.py,sha256=J4RNOggTx5nzovC0354SPGV-NkRpYnV51PMTYQ7aIQ8,8202
+ vellum/workflows/nodes/displayable/tool_calling_node/node.py,sha256=Exw0_5-bzphKF42pKwHF-tH25s8MD6wSbGcp7jZuBFg,8236
  vellum/workflows/nodes/displayable/tool_calling_node/state.py,sha256=CcBVb_YtwfSSka4ze678k6-qwmzMSfjfVP8_Y95feSo,302
  vellum/workflows/nodes/displayable/tool_calling_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/displayable/tool_calling_node/tests/test_composio_service.py,sha256=in1fbEz5x1tx3uKv9YXdvOncsHucNL8Ro6Go7lBuuOQ,8962
  vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=GZoeybB9uM7ai8sBLAtUMHrMVgh-WrJDWrKZci6feDs,11892
  vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py,sha256=SIu5GCj4tIE4fz-cAcdULtQfqZIhrcc3Doo6TWLXBws,8804
- vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=c0fJA-de3yJKGTzKfjyZOkbVhIndSAhZqBZp_DpU1fg,24158
+ vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=6XsvA77wF9MI6KBQNN1JMdpQwhK8pKUQtXgsPmq_I3Q,22837
  vellum/workflows/nodes/displayable/web_search_node/__init__.py,sha256=8FOnEP-n-U68cvxTlJW9wphIAGHq5aqjzLM-DoSSXnU,61
  vellum/workflows/nodes/displayable/web_search_node/node.py,sha256=NQYux2bOtuBF5E4tn-fXi5y3btURPRrNqMSM9MAZYI4,5091
  vellum/workflows/nodes/displayable/web_search_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1912,8 +1916,8 @@ vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py,sha256=c
  vellum/workflows/nodes/mocks.py,sha256=a1FjWEIocseMfjzM-i8DNozpUsaW0IONRpZmXBoWlyc,10455
  vellum/workflows/nodes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/nodes/tests/test_mocks.py,sha256=mfPvrs75PKcsNsbJLQAN6PDFoVqs9TmQxpdyFKDdO60,7837
- vellum/workflows/nodes/tests/test_utils.py,sha256=BUugAHx2C9YuCwTlsTXV1Glxca0kW3St6T9o_QFatSU,5649
- vellum/workflows/nodes/utils.py,sha256=wCvf8K5qruT5GwtvnHcQ-LMllktTD8aaFmAGpKQy--c,10720
+ vellum/workflows/nodes/tests/test_utils.py,sha256=6yn0ieMug-ndcPVR2Z0HLIAMuCuS-4ucKSMnx06qcEc,5674
+ vellum/workflows/nodes/utils.py,sha256=4u7gJnJtI1CM0RmsS6c-edTeGrJvd340rDiEtP6jRlk,10745
  vellum/workflows/outputs/__init__.py,sha256=AyZ4pRh_ACQIGvkf0byJO46EDnSix1ZCAXfvh-ms1QE,94
  vellum/workflows/outputs/base.py,sha256=zy02zr9DmG3j7Xp3Q8xiOiXFF_c7uNh76jf2LiMS-qE,10132
  vellum/workflows/ports/__init__.py,sha256=bZuMt-R7z5bKwpu4uPW7LlJeePOQWmCcDSXe5frUY5g,101
@@ -1940,7 +1944,7 @@ vellum/workflows/resolvers/tests/test_resolver.py,sha256=jXkJBb9SwtoH__bBN-ECohp
  vellum/workflows/resolvers/types.py,sha256=Hndhlk69g6EKLh_LYg5ILepW5U_h_BYNllfzhS9k8p4,237
  vellum/workflows/runner/__init__.py,sha256=i1iG5sAhtpdsrlvwgH6B-m49JsINkiWyPWs8vyT-bqM,72
  vellum/workflows/runner/runner.py,sha256=lnVbqA1nSdWuyY1SZDpLDvnpLRQcQyWYOWrx3RIJpcg,41043
- vellum/workflows/sandbox.py,sha256=jwlFFQjHDwmbVoBah_Q3i8K_BrzOt-F6TXFauiyVyIk,3021
+ vellum/workflows/sandbox.py,sha256=mezSZmilR_fwR8164n8CEfzlMeQ55IqfapHp4ftImvQ,3212
  vellum/workflows/state/__init__.py,sha256=yUUdR-_Vl7UiixNDYQZ-GEM_kJI9dnOia75TtuNEsnE,60
  vellum/workflows/state/base.py,sha256=m9fCqbZn21GshCVCjJTD1dPZEQjFrsMXqlg7tM9fIwM,24283
  vellum/workflows/state/context.py,sha256=khM30U1iDNts5Xp8LXa_WfpkITNITexrDUUFJ5wZ2W4,8445
@@ -1950,12 +1954,13 @@ vellum/workflows/state/store.py,sha256=uVe-oN73KwGV6M6YLhwZMMUQhzTQomsVfVnb8V91g
  vellum/workflows/state/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/state/tests/test_state.py,sha256=zEVFIY2any41X2BA5Us_qqKpzH5HRqmyrUJ04GTO0pU,7484
  vellum/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vellum/workflows/tests/test_dataset_row.py,sha256=qtz3cEcppCtr96co_8MmX4Jh1ro6bKZd_FGtRyFncRA,3370
  vellum/workflows/tests/test_sandbox.py,sha256=JKwaluI-lODQo7Ek9sjDstjL_WTdSqUlVik6ZVTfVOA,1826
  vellum/workflows/tests/test_undefined.py,sha256=zMCVliCXVNLrlC6hEGyOWDnQADJ2g83yc5FIM33zuo8,353
  vellum/workflows/types/__init__.py,sha256=KxUTMBGzuRCfiMqzzsykOeVvrrkaZmTTo1a7SLu8gRM,68
  vellum/workflows/types/code_execution_node_wrappers.py,sha256=fewX9bqF_4TZuK-gZYIn12s31-k03vHMGRpvFAPm11Y,3206
  vellum/workflows/types/core.py,sha256=TggDVs2lVya33xvu374EDhMC1b7RRlAAs0zWLaF46BA,1385
- vellum/workflows/types/definition.py,sha256=esmlFZFA6wnzNtSYUWpIMFmhhLs_kCm_h_THroL3Zd8,6196
+ vellum/workflows/types/definition.py,sha256=727tDk-XzLAlpK2_OuPICz2VnFLW5WZd-WcZQrd2lvY,6854
  vellum/workflows/types/generics.py,sha256=8jptbEx1fnJV0Lhj0MpCJOT6yNiEWeTOYOwrEAb5CRU,1576
  vellum/workflows/types/stack.py,sha256=h7NE0vXR7l9DevFBIzIAk1Zh59K-kECQtDTKOUunwMY,1314
  vellum/workflows/types/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1963,7 +1968,7 @@ vellum/workflows/types/tests/test_definition.py,sha256=rvDYjdJ1rvAv0qHBN7i7s-_WA
  vellum/workflows/types/tests/test_utils.py,sha256=UnZog59tR577mVwqZRqqWn2fScoOU1H6up0EzS8zYhw,2536
  vellum/workflows/types/utils.py,sha256=mTctHITBybpt4855x32oCKALBEcMNLn-9cCmfEKgJHQ,6498
  vellum/workflows/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- vellum/workflows/utils/functions.py,sha256=6WRRMb_XbxtvhUKOJq5ZChy0KKvlBaQCBiPhvecXT7I,10029
+ vellum/workflows/utils/functions.py,sha256=z1V_HujCZI-pfOLIEwkd0_ZeeR1MNqq3fvgDke62NRo,11016
  vellum/workflows/utils/hmac.py,sha256=JJCczc6pyV6DuE1Oa0QVfYPUN_of3zEYmGFib3OZnrE,1135
  vellum/workflows/utils/names.py,sha256=QtHquoaGqRseu5gg2OcVGI2d_CMcEOvjb9KspwH4C-A,552
  vellum/workflows/utils/pydantic_schema.py,sha256=eR_bBtY-T0pttJP-ARwagSdCOnwPUtiT3cegm2lzDTQ,1310
@@ -1982,8 +1987,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
  vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vellum/workflows/workflows/tests/test_base_workflow.py,sha256=ptMntHzVyy8ZuzNgeTuk7hREgKQ5UBdgq8VJFSGaW4Y,20832
  vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
- vellum_ai-1.3.10.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
- vellum_ai-1.3.10.dist-info/METADATA,sha256=rMxCNVxzMICH4KL7PndzBmRPdyejUvPmvuJnCS7goJc,5548
- vellum_ai-1.3.10.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- vellum_ai-1.3.10.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
- vellum_ai-1.3.10.dist-info/RECORD,,
+ vellum_ai-1.4.0.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+ vellum_ai-1.4.0.dist-info/METADATA,sha256=tI2aW2bZkPjR4_UtYRBydLigbdq2Et_pWewZJw1aKLE,5547
+ vellum_ai-1.4.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ vellum_ai-1.4.0.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+ vellum_ai-1.4.0.dist-info/RECORD,,
vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py
@@ -114,7 +114,7 @@ def test_serialize_workflow():
  "block_type": "VARIABLE",
  "state": None,
  "cache_config": None,
- "input_variable": "question",
+ "input_variable": "8eb8b551-9b48-43b3-861f-52adb5c585a8",
  }
  ],
  }
vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py
@@ -115,7 +115,7 @@ def test_serialize_workflow():
  "block_type": "VARIABLE",
  "state": None,
  "cache_config": None,
- "input_variable": "question",
+ "input_variable": "8eb8b551-9b48-43b3-861f-52adb5c585a8",
  }
  ],
  }
vellum_ee/workflows/display/workflows/base_workflow_display.py
@@ -17,6 +17,7 @@ from vellum.workflows.descriptors.base import BaseDescriptor
  from vellum.workflows.edges import Edge
  from vellum.workflows.events.workflow import NodeEventDisplayContext, WorkflowEventDisplayContext
  from vellum.workflows.inputs.base import BaseInputs
+ from vellum.workflows.inputs.dataset_row import DatasetRow
  from vellum.workflows.nodes.bases import BaseNode
  from vellum.workflows.nodes.displayable.bases.utils import primitive_to_vellum_value
  from vellum.workflows.nodes.displayable.final_output_node.node import FinalOutputNode
@@ -913,7 +914,10 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
  if dataset_attr and isinstance(dataset_attr, list):
  dataset = []
  for i, inputs_obj in enumerate(dataset_attr):
- if isinstance(inputs_obj, BaseInputs):
+ if isinstance(inputs_obj, DatasetRow):
+ serialized_inputs = json.loads(json.dumps(inputs_obj.inputs, cls=DefaultStateEncoder))
+ dataset.append({"label": inputs_obj.label, "inputs": serialized_inputs})
+ elif isinstance(inputs_obj, BaseInputs):
  serialized_inputs = json.loads(json.dumps(inputs_obj, cls=DefaultStateEncoder))
  dataset.append({"label": f"Scenario {i + 1}", "inputs": serialized_inputs})
  except (ImportError, AttributeError):
vellum_ee/workflows/tests/test_serialize_module.py
@@ -24,12 +24,11 @@ def test_serialize_module_with_actual_dataset():
  assert isinstance(result.dataset, list)
  assert len(result.dataset) == 2
 
- for i, item in enumerate(result.dataset):
- assert "label" in item
- assert "inputs" in item
- assert item["label"] == f"Scenario {i + 1}"
- assert isinstance(item["inputs"], dict)
- assert "message" in item["inputs"]
+ assert result.dataset[0]["label"] == "Scenario 1"
+ assert result.dataset[0]["inputs"]["message"] == "World"
+
+ assert result.dataset[1]["label"] == "Custom Test"
+ assert result.dataset[1]["inputs"]["message"] == "DatasetRow Test"
 
 
  def test_serialize_module_happy_path():