vellum-ai 1.0.8__py3-none-any.whl → 1.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. vellum/client/core/client_wrapper.py +2 -2
  2. vellum/client/resources/workflows/client.py +8 -0
  3. vellum/client/types/document_document_to_document_index.py +1 -1
  4. vellum/client/types/organization_read.py +2 -0
  5. vellum/client/types/slim_document_document_to_document_index.py +1 -1
  6. vellum/plugins/pydantic.py +9 -2
  7. vellum/workflows/emitters/__init__.py +2 -0
  8. vellum/workflows/emitters/base.py +17 -0
  9. vellum/workflows/emitters/vellum_emitter.py +138 -0
  10. vellum/workflows/integrations/composio_service.py +18 -2
  11. vellum/workflows/integrations/mcp_service.py +245 -0
  12. vellum/workflows/nodes/displayable/tool_calling_node/node.py +23 -8
  13. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_composio_service.py +43 -0
  14. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py +22 -0
  15. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py +8 -4
  16. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +108 -27
  17. vellum/workflows/types/core.py +4 -2
  18. vellum/workflows/types/definition.py +1 -0
  19. vellum/workflows/types/tests/test_definition.py +4 -1
  20. {vellum_ai-1.0.8.dist-info → vellum_ai-1.0.10.dist-info}/METADATA +1 -1
  21. {vellum_ai-1.0.8.dist-info → vellum_ai-1.0.10.dist-info}/RECORD +27 -25
  22. vellum_cli/push.py +4 -1
  23. vellum_cli/tests/test_push.py +4 -6
  24. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py +1 -0
  25. {vellum_ai-1.0.8.dist-info → vellum_ai-1.0.10.dist-info}/LICENSE +0 -0
  26. {vellum_ai-1.0.8.dist-info → vellum_ai-1.0.10.dist-info}/WHEEL +0 -0
  27. {vellum_ai-1.0.8.dist-info → vellum_ai-1.0.10.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py CHANGED
@@ -25,10 +25,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "vellum-ai/1.0.8",
+            "User-Agent": "vellum-ai/1.0.10",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "1.0.8",
+            "X-Fern-SDK-Version": "1.0.10",
         }
         if self._api_version is not None:
             headers["X-API-Version"] = self._api_version
vellum/client/resources/workflows/client.py CHANGED
@@ -27,6 +27,7 @@ class WorkflowsClient:
         id: str,
         *,
         exclude_code: typing.Optional[bool] = None,
+        exclude_display: typing.Optional[bool] = None,
         include_json: typing.Optional[bool] = None,
         include_sandbox: typing.Optional[bool] = None,
         strict: typing.Optional[bool] = None,
@@ -40,6 +41,8 @@ class WorkflowsClient:
 
         exclude_code : typing.Optional[bool]
 
+        exclude_display : typing.Optional[bool]
+
         include_json : typing.Optional[bool]
 
         include_sandbox : typing.Optional[bool]
@@ -60,6 +63,7 @@ class WorkflowsClient:
             method="GET",
             params={
                 "exclude_code": exclude_code,
+                "exclude_display": exclude_display,
                 "include_json": include_json,
                 "include_sandbox": include_sandbox,
                 "strict": strict,
@@ -165,6 +169,7 @@ class AsyncWorkflowsClient:
         id: str,
         *,
         exclude_code: typing.Optional[bool] = None,
+        exclude_display: typing.Optional[bool] = None,
         include_json: typing.Optional[bool] = None,
         include_sandbox: typing.Optional[bool] = None,
         strict: typing.Optional[bool] = None,
@@ -178,6 +183,8 @@ class AsyncWorkflowsClient:
 
         exclude_code : typing.Optional[bool]
 
+        exclude_display : typing.Optional[bool]
+
         include_json : typing.Optional[bool]
 
         include_sandbox : typing.Optional[bool]
@@ -198,6 +205,7 @@ class AsyncWorkflowsClient:
             method="GET",
            params={
                 "exclude_code": exclude_code,
+                "exclude_display": exclude_display,
                 "include_json": include_json,
                 "include_sandbox": include_sandbox,
                 "strict": strict,
vellum/client/types/document_document_to_document_index.py CHANGED
@@ -22,7 +22,7 @@ class DocumentDocumentToDocumentIndex(UniversalBaseModel):
     Vellum-generated ID that uniquely identifies the environment index this document is included in.
     """
 
-    document_index_id: str = pydantic.Field()
+    document_index_id: typing.Optional[str] = pydantic.Field(default=None)
     """
     Vellum-generated ID that uniquely identifies the index this document is included in.
     """
vellum/client/types/organization_read.py CHANGED
@@ -2,6 +2,7 @@
 
 from ..core.pydantic_utilities import UniversalBaseModel
 import typing
+import datetime as dt
 from .new_member_join_behavior_enum import NewMemberJoinBehaviorEnum
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
 import pydantic
@@ -10,6 +11,7 @@ import pydantic
 class OrganizationRead(UniversalBaseModel):
     id: str
     name: str
+    created: typing.Optional[dt.datetime] = None
     allow_staff_access: typing.Optional[bool] = None
     new_member_join_behavior: NewMemberJoinBehaviorEnum
     limit_config: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
vellum/client/types/slim_document_document_to_document_index.py CHANGED
@@ -22,7 +22,7 @@ class SlimDocumentDocumentToDocumentIndex(UniversalBaseModel):
     Vellum-generated ID that uniquely identifies the environment index this document is included in.
     """
 
-    document_index_id: str = pydantic.Field()
+    document_index_id: typing.Optional[str] = pydantic.Field(default=None)
     """
     Vellum-generated ID that uniquely identifies the index this document is included in.
     """
vellum/plugins/pydantic.py CHANGED
@@ -28,9 +28,12 @@ def import_base_descriptor():
     """
     We have to avoid importing from vellum.* in this file because it will cause a circular import.
     """
-    from vellum.workflows.descriptors.base import BaseDescriptor
+    try:
+        from vellum.workflows.descriptors.base import BaseDescriptor
 
-    return BaseDescriptor
+        return BaseDescriptor
+    except Exception:
+        return None
 
 
 # https://docs.pydantic.dev/2.8/concepts/plugins/#build-a-plugin
@@ -60,6 +63,10 @@ class OnValidatePython(ValidatePythonHandlerProtocol):
         self.tracked_descriptors = {}
         BaseDescriptor = import_base_descriptor()
 
+        # If BaseDescriptor import failed, skip descriptor processing
+        if BaseDescriptor is None:
+            return
+
         for key, value in input.items():
             field_info = model_fields.get(key)
             if isinstance(value, BaseDescriptor) and (
vellum/workflows/emitters/__init__.py CHANGED
@@ -1,5 +1,7 @@
 from .base import BaseWorkflowEmitter
+from .vellum_emitter import VellumEmitter
 
 __all__ = [
     "BaseWorkflowEmitter",
+    "VellumEmitter",
 ]
vellum/workflows/emitters/base.py CHANGED
@@ -1,10 +1,27 @@
 from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Optional
 
 from vellum.workflows.events.workflow import WorkflowEvent
 from vellum.workflows.state.base import BaseState
 
+# To protect against circular imports
+if TYPE_CHECKING:
+    from vellum.workflows.state.context import WorkflowContext
+
 
 class BaseWorkflowEmitter(ABC):
+    def __init__(self):
+        self._context: Optional["WorkflowContext"] = None
+
+    def register_context(self, context: "WorkflowContext") -> None:
+        """
+        Register the workflow context with this emitter.
+
+        Args:
+            context: The workflow context containing shared resources like vellum_client.
+        """
+        self._context = context
+
     @abstractmethod
     def emit_event(self, event: WorkflowEvent) -> None:
         pass
vellum/workflows/emitters/vellum_emitter.py ADDED
@@ -0,0 +1,138 @@
+import logging
+import time
+from typing import Any, Dict, Optional
+
+import httpx
+
+from vellum.workflows.emitters.base import BaseWorkflowEmitter
+from vellum.workflows.events.types import default_serializer
+from vellum.workflows.events.workflow import WorkflowEvent
+from vellum.workflows.state.base import BaseState
+
+logger = logging.getLogger(__name__)
+
+
+class VellumEmitter(BaseWorkflowEmitter):
+    """
+    Emitter that sends workflow events to Vellum's infrastructure for monitoring
+    externally hosted SDK-powered workflows.
+
+    Usage:
+        class MyWorkflow(BaseWorkflow):
+            emitters = [VellumEmitter]
+
+    The emitter will automatically use the same Vellum client configuration
+    as the workflow it's attached to.
+    """
+
+    def __init__(
+        self,
+        *,
+        timeout: Optional[float] = 30.0,
+        max_retries: int = 3,
+    ):
+        """
+        Initialize the VellumEmitter.
+
+        Args:
+            timeout: Request timeout in seconds.
+            max_retries: Maximum number of retry attempts for failed requests.
+        """
+        super().__init__()
+        self._timeout = timeout
+        self._max_retries = max_retries
+        self._events_endpoint = "events"  # TODO: make this configurable with the correct url
+
+    def emit_event(self, event: WorkflowEvent) -> None:
+        """
+        Emit a workflow event to Vellum's infrastructure.
+
+        Args:
+            event: The workflow event to emit.
+        """
+        if not self._context:
+            return
+
+        try:
+            event_data = default_serializer(event)
+
+            self._send_event(event_data)
+
+        except Exception as e:
+            logger.exception(f"Failed to emit event {event.name}: {e}")
+
+    def snapshot_state(self, state: BaseState) -> None:
+        """
+        Send a state snapshot to Vellum's infrastructure.
+
+        Args:
+            state: The workflow state to snapshot.
+        """
+        pass
+
+    def _send_event(self, event_data: Dict[str, Any]) -> None:
+        """
+        Send event data to Vellum's events endpoint with retry logic.
+
+        Args:
+            event_data: The serialized event data to send.
+        """
+        if not self._context:
+            logger.warning("Cannot send event: No workflow context registered")
+            return
+
+        client = self._context.vellum_client
+
+        for attempt in range(self._max_retries + 1):
+            try:
+                # Use the Vellum client's underlying HTTP client to make the request
+                # For proper authentication headers and configuration
+                base_url = client._client_wrapper.get_environment().default
+                response = client._client_wrapper.httpx_client.request(
+                    method="POST",
+                    path=f"{base_url}/{self._events_endpoint}",  # TODO: will be replaced with the correct url
+                    json=event_data,
+                    headers=client._client_wrapper.get_headers(),
+                    request_options={"timeout_in_seconds": self._timeout},
+                )
+
+                response.raise_for_status()
+
+                if attempt > 0:
+                    logger.info(f"Event sent successfully after {attempt + 1} attempts")
+                return
+
+            except httpx.HTTPStatusError as e:
+                if e.response.status_code >= 500:
+                    # Server errors might be transient, retry
+                    if attempt < self._max_retries:
+                        wait_time = min(2**attempt, 60)  # Exponential backoff, max 60s
+                        logger.warning(
+                            f"Server error emitting event (attempt {attempt + 1}/{self._max_retries + 1}): "
+                            f"{e.response.status_code}. Retrying in {wait_time}s..."
+                        )
+                        time.sleep(wait_time)
+                        continue
+                    else:
+                        logger.exception(
+                            f"Server error emitting event after {self._max_retries + 1} attempts: "
+                            f"{e.response.status_code} {e.response.text}"
+                        )
+                        return
+                else:
+                    # Client errors (4xx) are not retriable
+                    logger.exception(f"Client error emitting event: {e.response.status_code} {e.response.text}")
+                    return
+
+            except httpx.RequestError as e:
+                if attempt < self._max_retries:
+                    wait_time = min(2**attempt, 60)  # Exponential backoff, max 60s
+                    logger.warning(
+                        f"Network error emitting event (attempt {attempt + 1}/{self._max_retries + 1}): "
+                        f"{e}. Retrying in {wait_time}s..."
+                    )
+                    time.sleep(wait_time)
+                    continue
+                else:
+                    logger.exception(f"Network error emitting event after {self._max_retries + 1} attempts: {e}")
+                    return
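A minimal sketch of attaching the new emitter to a workflow, following the usage shown in the class docstring above; the workflow class itself is a placeholder:

    from vellum.workflows import BaseWorkflow
    from vellum.workflows.emitters import VellumEmitter


    class MyWorkflow(BaseWorkflow):
        # Forwards workflow events to Vellum using the workflow's own client configuration.
        # The constructor also accepts timeout and max_retries for tuning retry behavior.
        emitters = [VellumEmitter]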
vellum/workflows/integrations/composio_service.py CHANGED
@@ -5,6 +5,7 @@ from typing import Any, Dict, List, Optional
 
 import requests
 
+from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 
 logger = logging.getLogger(__name__)
@@ -72,6 +73,17 @@ class ComposioService:
 
             response.raise_for_status()
             return response.json()
+        except requests.exceptions.HTTPError as e:
+            if e.response.status_code == 401:
+                raise NodeException(
+                    message="Failed to authorize Composio request. Make sure to define a COMPOSIO_API_KEY",
+                    code=WorkflowErrorCode.PROVIDER_CREDENTIALS_UNAVAILABLE,
+                )
+            else:
+                response_text = e.response.text if e.response else "No response"
+                raise NodeException(
+                    f"Composio API request failed with status {e.response.status_code}: {response_text}"
+                )
         except Exception as e:
             raise NodeException(f"Composio API request failed: {e}")
 
@@ -123,16 +135,20 @@ class ComposioService:
         else:
             raise NodeException(f"Failed to retrieve tool details for '{tool_slug}': {error_message}")
 
-    def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Any:
+    def execute_tool(self, tool_name: str, arguments: Dict[str, Any], user_id: Optional[str] = None) -> Any:
         """Execute a tool using direct API request
 
         Args:
            tool_name: The name of the tool to execute (e.g., "HACKERNEWS_GET_USER")
            arguments: Dictionary of arguments to pass to the tool
+           user_id: Optional user ID to identify which user's Composio connection to use
 
        Returns:
            The result of the tool execution
        """
         endpoint = f"/tools/execute/{tool_name}"
-        response = self._make_request(endpoint, method="POST", json_data={"arguments": arguments})
+        json_data: Dict[str, Any] = {"arguments": arguments}
+        if user_id is not None:
+            json_data["user_id"] = user_id
+        response = self._make_request(endpoint, method="POST", json_data=json_data)
         return response.get("data", response)
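A minimal sketch of the new per-user execution path, assuming a COMPOSIO_API_KEY is available to the service (as the 401 handling above expects); the tool name comes from the docstring example, and the argument payload and user id are illustrative placeholders:

    from vellum.workflows.integrations.composio_service import ComposioService

    service = ComposioService()
    # user_id selects which user's Composio connection runs the tool; omitting it preserves the old behavior
    result = service.execute_tool(
        tool_name="HACKERNEWS_GET_USER",
        arguments={"username": "<username>"},
        user_id="user-123",
    )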
vellum/workflows/integrations/mcp_service.py ADDED
@@ -0,0 +1,245 @@
+import asyncio
+import json
+import logging
+from typing import Any, Dict, List, Optional
+
+import httpx
+
+from vellum.workflows.constants import AuthorizationType
+from vellum.workflows.errors.types import WorkflowErrorCode
+from vellum.workflows.exceptions import NodeException
+from vellum.workflows.types.core import VellumSecret
+from vellum.workflows.types.definition import MCPServer, MCPToolDefinition
+
+logger = logging.getLogger(__name__)
+
+
+class MCPHttpClient:
+    """
+    Direct HTTP implementation for MCP (Model Context Protocol) client
+    without using the official Python SDK.
+
+    Supports Streamable HTTP transport using httpx.
+    """
+
+    def __init__(self, server_url: str, headers: Dict[str, str], session_timeout: int = 30):
+        """
+        Initialize MCP HTTP client.
+
+        Args:
+            server_url: The MCP server endpoint URL (e.g., "https://example.com/mcp")
+            headers: Authentication headers
+            session_timeout: Timeout for HTTP requests in seconds
+        """
+        self.server_url = server_url.rstrip("/")
+        self.headers = headers
+        self.session_timeout = session_timeout
+        self.session_id: Optional[str] = None
+        self.request_id = 0
+        self._client: Optional[httpx.AsyncClient] = None
+
+    async def __aenter__(self):
+        """Async context manager entry."""
+        self._client = httpx.AsyncClient(timeout=httpx.Timeout(self.session_timeout), headers=self.headers)
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit."""
+        if self._client:
+            await self._client.aclose()
+
+    def _next_request_id(self) -> int:
+        """Generate next request ID."""
+        self.request_id += 1
+        return self.request_id
+
+    async def _send_request(self, method: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+        """
+        Send a JSON-RPC request to the MCP server.
+
+        Args:
+            method: The JSON-RPC method name
+            params: Optional parameters for the method
+
+        Returns:
+            The JSON-RPC response
+        """
+        if not self._client:
+            raise RuntimeError("Client session not initialized. Use 'async with' context manager.")
+
+        # Prepare JSON-RPC request
+        request_data = {"jsonrpc": "2.0", "id": self._next_request_id(), "method": method, "params": params or {}}
+
+        # Prepare headers
+        headers = {
+            "Content-Type": "application/json",
+            "Accept": "application/json",
+        }
+
+        # Include session ID if we have one
+        if self.session_id:
+            headers["Mcp-Session-Id"] = self.session_id
+
+        logger.debug(f"Sending request: {json.dumps(request_data, indent=2)}")
+
+        # Send POST request
+        response = await self._client.post(self.server_url, json=request_data, headers=headers)
+
+        # Check for session ID in response headers
+        if "Mcp-Session-Id" in response.headers:
+            self.session_id = response.headers["Mcp-Session-Id"]
+            logger.debug(f"Received session ID: {self.session_id}")
+
+        # Handle JSON response
+        response_data = response.json()
+        logger.debug(f"Received response: {json.dumps(response_data, indent=2)}")
+
+        if "error" in response_data:
+            raise Exception(f"MCP Error: {response_data['error']}")
+
+        return response_data
+
+    async def initialize(self, client_info: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+        """
+        Initialize the MCP connection.
+
+        Args:
+            client_info: Optional client information
+
+        Returns:
+            Server capabilities and information
+        """
+        params = {
+            "protocolVersion": "2025-06-18",
+            "capabilities": {},
+            "clientInfo": client_info or {"name": "vellum-mcp-client", "version": "1.0.0"},
+        }
+
+        response = await self._send_request("initialize", params)
+        return response.get("result", {})
+
+    async def list_tools(self) -> List[Dict[str, Any]]:
+        """
+        Get list of available tools from the server.
+
+        Returns:
+            List of tool definitions
+        """
+        response = await self._send_request("tools/list")
+        return response.get("result", {}).get("tools", [])
+
+    async def call_tool(self, name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Call a tool on the server.
+
+        Args:
+            name: Tool name
+            arguments: Tool arguments
+
+        Returns:
+            Tool execution result
+        """
+        params = {"name": name, "arguments": arguments}
+
+        response = await self._send_request("tools/call", params)
+        return response.get("result", {})
+
+
+class MCPService:
+    def _get_auth_headers(self, server: MCPServer) -> Dict[str, str]:
+        headers = {}
+        if server.authorization_type == AuthorizationType.BEARER_TOKEN:
+            token = server.bearer_token_value
+            if not token:
+                raise NodeException(
+                    "Bearer token is required for BEARER_TOKEN authorization type",
+                    code=WorkflowErrorCode.INVALID_INPUTS,
+                )
+
+            headers["Authorization"] = f"Bearer {token}"
+
+        elif server.authorization_type == AuthorizationType.API_KEY:
+            key = server.api_key_header_key
+            value = server.api_key_header_value
+            if not key or not value:
+                raise NodeException(
+                    "API key header key and value are required for API_KEY authorization type",
+                    code=WorkflowErrorCode.INVALID_INPUTS,
+                )
+            if isinstance(value, VellumSecret):
+                headers[key] = value.name
+            elif isinstance(value, str):
+                headers[key] = value
+
+        return headers
+
+    async def _execute_mcp_call(self, server: MCPServer, operation: str, **kwargs) -> Any:
+        """Execute an MCP operation using direct HTTP calls."""
+        headers = self._get_auth_headers(server)
+
+        try:
+            async with MCPHttpClient(server.url, headers) as client:
+                await client.initialize()
+
+                if operation == "list_tools":
+                    return await client.list_tools()
+                elif operation == "call_tool":
+                    return await client.call_tool(
+                        name=kwargs["name"],
+                        arguments=kwargs["arguments"],
+                    )
+                else:
+                    raise ValueError(f"Unknown MCP operation: {operation}")
+
+        except Exception as e:
+            logger.error(f"Error executing MCP operation {operation}: {e}")
+            raise NodeException(
+                message=f"Error executing MCP operation '{operation}': {str(e)}",
+                code=WorkflowErrorCode.NODE_EXECUTION,
+            )
+
+    def list_tools(self, server: MCPServer) -> List[Dict[str, Any]]:
+        """List available tools from an MCP server."""
+        try:
+            tools = asyncio.run(self._execute_mcp_call(server, "list_tools"))
+            return tools
+        except Exception as e:
+            logger.warning(f"Failed to list tools from MCP server '{server.name}': {e}")
+            return []
+
+    def execute_tool(self, tool_def: MCPToolDefinition, arguments: Dict[str, Any]) -> Any:
+        """Execute a tool on an MCP server."""
+        try:
+            result = asyncio.run(
+                self._execute_mcp_call(
+                    tool_def.server,
+                    "call_tool",
+                    name=tool_def.name,
+                    arguments=arguments,
+                )
+            )
+            return result
+        except Exception as e:
+            logger.error(f"Error executing MCP tool '{tool_def.name}': {e}")
+            raise NodeException(
+                message=f"Error executing MCP tool '{tool_def.name}': {str(e)}",
+                code=WorkflowErrorCode.NODE_EXECUTION,
+            )
+
+    def hydrate_tool_definitions(self, server_def: MCPServer) -> List[MCPToolDefinition]:
+        """Hydrate an MCPToolDefinition with detailed information from the MCP server."""
+        try:
+            tools = self.list_tools(server_def)
+
+            return [
+                MCPToolDefinition(
+                    name=tool["name"],
+                    server=server_def,
+                    description=tool["description"],
+                    parameters=tool["inputSchema"],
+                )
+                for tool in tools
+            ]
+        except Exception as e:
+            logger.warning(f"Failed to hydrate MCP server '{server_def.name}': {e}")
+            return []
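A minimal usage sketch of the MCP service added above, assuming a reachable Streamable HTTP MCP endpoint; the server name, URL, and token are placeholders, and the MCPServer fields mirror the attributes read in _get_auth_headers (the keyword construction itself is illustrative):

    from vellum.workflows.constants import AuthorizationType
    from vellum.workflows.integrations.mcp_service import MCPService
    from vellum.workflows.types.definition import MCPServer

    server = MCPServer(
        name="github",
        url="https://example.com/mcp",
        authorization_type=AuthorizationType.BEARER_TOKEN,
        bearer_token_value="<token>",
    )

    service = MCPService()
    # Tool discovery (initialize + tools/list), then a tools/call against the first discovered tool
    tool_definitions = service.hydrate_tool_definitions(server)
    if tool_definitions:
        result = service.execute_tool(tool_definitions[0], arguments={})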
vellum/workflows/nodes/displayable/tool_calling_node/node.py CHANGED
@@ -1,4 +1,4 @@
-from typing import Any, ClassVar, Dict, Iterator, List, Optional, Set, Union
+from typing import Any, ClassVar, Dict, Generic, Iterator, List, Optional, Set, Union
 
 from vellum import ChatMessage, PromptBlock
 from vellum.client.types.prompt_parameters import PromptParameters
@@ -13,16 +13,21 @@ from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.displayable.tool_calling_node.state import ToolCallingState
 from vellum.workflows.nodes.displayable.tool_calling_node.utils import (
     create_function_node,
+    create_mcp_tool_node,
     create_tool_router_node,
     get_function_name,
+    get_mcp_tool_name,
+    hydrate_mcp_tool_definitions,
 )
 from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
 from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.types.core import EntityInputsInterface, Tool
+from vellum.workflows.types.definition import MCPServer
+from vellum.workflows.types.generics import StateType
 from vellum.workflows.workflows.event_filters import all_workflow_event_filter
 
 
-class ToolCallingNode(BaseNode):
+class ToolCallingNode(BaseNode[StateType], Generic[StateType]):
     """
     A Node that dynamically invokes the provided functions to the underlying Prompt
 
@@ -141,12 +146,22 @@ class ToolCallingNode(BaseNode):
 
         self._function_nodes = {}
         for function in self.functions:
-            function_name = get_function_name(function)
-
-            self._function_nodes[function_name] = create_function_node(
-                function=function,
-                tool_router_node=self.tool_router_node,
-            )
+            if isinstance(function, MCPServer):
+                tool_definitions = hydrate_mcp_tool_definitions(function)
+                for tool_definition in tool_definitions:
+                    function_name = get_mcp_tool_name(tool_definition)
+
+                    self._function_nodes[function_name] = create_mcp_tool_node(
+                        tool_def=tool_definition,
+                        tool_router_node=self.tool_router_node,
+                    )
+            else:
+                function_name = get_function_name(function)
+
+                self._function_nodes[function_name] = create_function_node(
+                    function=function,
+                    tool_router_node=self.tool_router_node,
+                )
 
         graph_set = set()
 
vellum/workflows/nodes/displayable/tool_calling_node/tests/test_composio_service.py CHANGED
@@ -125,3 +125,46 @@ class TestComposioCoreService:
             timeout=30,
         )
         assert result == {"items": [], "total": 0}
+
+    def test_execute_tool_with_user_id(self, composio_service, mock_requests, mock_tool_execution_response):
+        """Test executing a tool with user_id parameter"""
+        # GIVEN a user_id and tool arguments
+        user_id = "test_user_123"
+        tool_args = {"param1": "value1"}
+        mock_response = Mock()
+        mock_response.json.return_value = mock_tool_execution_response
+        mock_response.raise_for_status.return_value = None
+        mock_requests.post.return_value = mock_response
+
+        # WHEN we execute a tool with user_id
+        result = composio_service.execute_tool("TEST_TOOL", tool_args, user_id=user_id)
+
+        # THEN the user_id should be included in the request payload
+        mock_requests.post.assert_called_once_with(
+            "https://backend.composio.dev/api/v3/tools/execute/TEST_TOOL",
+            headers={"x-api-key": "test-key", "Content-Type": "application/json"},
+            json={"arguments": tool_args, "user_id": user_id},
+            timeout=30,
+        )
+        assert result == {"items": [], "total": 0}
+
+    def test_execute_tool_without_user_id(self, composio_service, mock_requests, mock_tool_execution_response):
+        """Test executing a tool without user_id parameter maintains backward compatibility"""
+        # GIVEN tool arguments without user_id
+        tool_args = {"param1": "value1"}
+        mock_response = Mock()
+        mock_response.json.return_value = mock_tool_execution_response
+        mock_response.raise_for_status.return_value = None
+        mock_requests.post.return_value = mock_response
+
+        # WHEN we execute a tool without user_id
+        result = composio_service.execute_tool("TEST_TOOL", tool_args)
+
+        # THEN the user_id should NOT be included in the request payload
+        mock_requests.post.assert_called_once_with(
+            "https://backend.composio.dev/api/v3/tools/execute/TEST_TOOL",
+            headers={"x-api-key": "test-key", "Content-Type": "application/json"},
+            json={"arguments": tool_args},
+            timeout=30,
+        )
+        assert result == {"items": [], "total": 0}
vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py CHANGED
@@ -211,3 +211,25 @@ def test_tool_calling_node_with_user_provided_chat_history_block(vellum_adhoc_pr
     ]
     assert len(chat_history_inputs) == 1
     assert chat_history_inputs[0].value == [ChatMessage(role="USER", text="Hello from user")]
+
+
+def test_tool_calling_node_with_generic_type_parameter():
+    # GIVEN a custom state class
+    class State(BaseState):
+        pass
+
+    # AND a ToolCallingNode that uses the generic type parameter
+    class TestToolCallingNode(ToolCallingNode[State]):
+        ml_model = "gpt-4o-mini"
+        blocks = []
+        functions = [first_function]
+        max_prompt_iterations = 1
+
+    # WHEN we create an instance of the node
+    state = State()
+    node = TestToolCallingNode(state=state)
+
+    # THEN the node should be created successfully
+    assert node is not None
+    assert isinstance(node, TestToolCallingNode)
+    assert node.state == state
vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py CHANGED
@@ -12,7 +12,11 @@ from vellum.prompts.constants import DEFAULT_PROMPT_PARAMETERS
 from vellum.workflows import BaseWorkflow
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases import BaseNode
-from vellum.workflows.nodes.displayable.tool_calling_node.utils import create_tool_router_node, get_function_name
+from vellum.workflows.nodes.displayable.tool_calling_node.utils import (
+    create_tool_router_node,
+    get_function_name,
+    get_mcp_tool_name,
+)
 from vellum.workflows.outputs.base import BaseOutputs
 from vellum.workflows.state.base import BaseState
 from vellum.workflows.types.definition import ComposioToolDefinition, DeploymentDefinition, MCPServer, MCPToolDefinition
@@ -77,9 +81,9 @@ def test_get_function_name_mcp_tool_definition():
         parameters={"repository_name": "string", "description": "string"},
     )
 
-    result = get_function_name(mcp_tool)
+    result = get_mcp_tool_name(mcp_tool)
 
-    assert result == "create_repository"
+    assert result == "github__create_repository"
 
 
 @pytest.mark.parametrize(
@@ -93,7 +97,7 @@ def test_get_function_name_composio_tool_definition_various_toolkits(
     toolkit: str, action: str, description: str, expected_result: str
 ):
     """Test ComposioToolDefinition function name generation with various toolkits."""
-    composio_tool = ComposioToolDefinition(toolkit=toolkit, action=action, description=description)
+    composio_tool = ComposioToolDefinition(toolkit=toolkit, action=action, description=description, user_id=None)
 
     result = get_function_name(composio_tool)
 
vellum/workflows/nodes/displayable/tool_calling_node/utils.py CHANGED
@@ -17,6 +17,7 @@ from vellum.workflows.exceptions import NodeException
 from vellum.workflows.expressions.concat import ConcatExpression
 from vellum.workflows.inputs import BaseInputs
 from vellum.workflows.integrations.composio_service import ComposioService
+from vellum.workflows.integrations.mcp_service import MCPService
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.core.inline_subworkflow_node.node import InlineSubworkflowNode
 from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode
@@ -27,8 +28,8 @@ from vellum.workflows.ports.port import Port
 from vellum.workflows.references.lazy import LazyReference
 from vellum.workflows.state import BaseState
 from vellum.workflows.state.encoder import DefaultStateEncoder
-from vellum.workflows.types.core import EntityInputsInterface, MergeBehavior, Tool
-from vellum.workflows.types.definition import ComposioToolDefinition, DeploymentDefinition, MCPToolDefinition
+from vellum.workflows.types.core import EntityInputsInterface, MergeBehavior, Tool, ToolBase
+from vellum.workflows.types.definition import ComposioToolDefinition, DeploymentDefinition, MCPServer, MCPToolDefinition
 from vellum.workflows.types.generics import is_workflow_class
 
 CHAT_HISTORY_VARIABLE = "chat_history"
@@ -183,7 +184,12 @@ class ComposioNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
         try:
             # Execute using ComposioService
             composio_service = ComposioService()
-            result = composio_service.execute_tool(tool_name=self.composio_tool.action, arguments=arguments)
+            if self.composio_tool.user_id is not None:
+                result = composio_service.execute_tool(
+                    tool_name=self.composio_tool.action, arguments=arguments, user_id=self.composio_tool.user_id
+                )
+            else:
+                result = composio_service.execute_tool(tool_name=self.composio_tool.action, arguments=arguments)
         except Exception as e:
             raise NodeException(
                 message=f"Error executing Composio tool '{self.composio_tool.action}': {str(e)}",
@@ -196,6 +202,29 @@ class ComposioNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
         yield from []
 
 
+class MCPNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
+    """Node that executes an MCP tool with function call output."""
+
+    mcp_tool: MCPToolDefinition
+
+    def run(self) -> Iterator[BaseOutput]:
+        arguments = self._extract_function_arguments()
+
+        try:
+            mcp_service = MCPService()
+            result = mcp_service.execute_tool(tool_def=self.mcp_tool, arguments=arguments)
+        except Exception as e:
+            raise NodeException(
+                message=f"Error executing MCP tool '{self.mcp_tool.name}': {str(e)}",
+                code=WorkflowErrorCode.NODE_EXECUTION,
+            )
+
+        # Add result to chat history
+        self._add_function_result_to_chat_history(result, self.state)
+
+        yield from []
+
+
 def _hydrate_composio_tool_definition(tool_def: ComposioToolDefinition) -> ComposioToolDefinition:
     """Hydrate a ComposioToolDefinition with detailed information from the Composio API.
 
@@ -225,6 +254,7 @@ def _hydrate_composio_tool_definition(tool_def: ComposioToolDefinition) -> Compo
             parameters=tool_details.get("input_parameters", tool_def.parameters),
             version=tool_details.get("version", tool_def.version),
             tags=tool_details.get("tags", tool_def.tags),
+            user_id=tool_def.user_id,
         )
 
     except Exception as e:
@@ -233,6 +263,26 @@ def _hydrate_composio_tool_definition(tool_def: ComposioToolDefinition) -> Compo
         return tool_def
 
 
+def hydrate_mcp_tool_definitions(server_def: MCPServer) -> List[MCPToolDefinition]:
+    """Hydrate an MCPToolDefinition with detailed information from the MCP server.
+
+    We do tool discovery on the MCP server to get the tool definitions.
+
+    Args:
+        tool_def: The basic MCPToolDefinition to enhance
+
+    Returns:
+        MCPToolDefinition with detailed parameters and description
+    """
+    try:
+        mcp_service = MCPService()
+        return mcp_service.hydrate_tool_definitions(server_def)
+    except Exception as e:
+        # If hydration fails, log and return original
+        logger.warning(f"Failed to enhance MCP server '{server_def.name}': {e}")
+        return []
+
+
 def create_tool_router_node(
     ml_model: str,
     blocks: List[Union[PromptBlock, Dict[str, Any]]],
@@ -246,6 +296,19 @@ def create_tool_router_node(
     Ports = type("Ports", (), {})
     prompt_functions: List[Union[Tool, FunctionDefinition]] = []
 
+    # Avoid using lambda to capture function_name
+    # lambda will capture the function_name by reference,
+    # and if the function_name is changed, the port_condition will also change.
+    def create_port_condition(fn_name):
+        return Port.on_if(
+            LazyReference(
+                lambda: (
+                    node.Outputs.results[0]["type"].equals("FUNCTION_CALL")
+                    & node.Outputs.results[0]["value"]["name"].equals(fn_name)
+                )
+            )
+        )
+
     for function in functions:
         if isinstance(function, ComposioToolDefinition):
             # Get Composio tool details and hydrate the function definition
@@ -257,26 +320,28 @@ def create_tool_router_node(
                     parameters=enhanced_function.parameters,
                 )
             )
+            # Create port for this function (using original function for get_function_name)
+            function_name = get_function_name(function)
+            port = create_port_condition(function_name)
+            setattr(Ports, function_name, port)
+        elif isinstance(function, MCPServer):
+            tool_functions: List[MCPToolDefinition] = hydrate_mcp_tool_definitions(function)
+            for tool_function in tool_functions:
+                name = get_mcp_tool_name(tool_function)
+                prompt_functions.append(
+                    FunctionDefinition(
+                        name=name,
+                        description=tool_function.description,
+                        parameters=tool_function.parameters,
+                    )
+                )
+                port = create_port_condition(name)
+                setattr(Ports, name, port)
         else:
             prompt_functions.append(function)
-
-        # Create port for this function (using original function for get_function_name)
-        function_name = get_function_name(function)
-
-        # Avoid using lambda to capture function_name
-        # lambda will capture the function_name by reference,
-        # and if the function_name is changed, the port_condition will also change.
-        def create_port_condition(fn_name):
-            return LazyReference(
-                lambda: (
-                    node.Outputs.results[0]["type"].equals("FUNCTION_CALL")
-                    & node.Outputs.results[0]["value"]["name"].equals(fn_name)
-                )
-            )
-
-        port_condition = create_port_condition(function_name)
-        port = Port.on_if(port_condition)
-        setattr(Ports, function_name, port)
+            function_name = get_function_name(function)
+            port = create_port_condition(function_name)
+            setattr(Ports, function_name, port)
 
     # Add the else port for when no function conditions match
     setattr(Ports, "default", Port.on_else())
@@ -338,7 +403,7 @@ def create_tool_router_node(
 
 
 def create_function_node(
-    function: Tool,
+    function: ToolBase,
     tool_router_node: Type[ToolRouterNode],
 ) -> Type[BaseNode]:
     """
@@ -379,8 +444,6 @@ def create_function_node(
             },
         )
         return node
-    elif isinstance(function, MCPToolDefinition):
-        pass
     elif is_workflow_class(function):
        node = type(
             f"DynamicInlineSubworkflowNode_{function.__name__}",
@@ -406,13 +469,31 @@ def create_function_node(
         return node
 
 
-def get_function_name(function: Tool) -> str:
+def create_mcp_tool_node(
+    tool_def: MCPToolDefinition,
+    tool_router_node: Type[ToolRouterNode],
+) -> Type[BaseNode]:
+    node = type(
+        f"MCPNode_{tool_def.name}",
+        (MCPNode,),
+        {
+            "mcp_tool": tool_def,
+            "function_call_output": tool_router_node.Outputs.results,
+            "__module__": __name__,
+        },
+    )
+    return node
+
+
+def get_function_name(function: ToolBase) -> str:
     if isinstance(function, DeploymentDefinition):
         name = str(function.deployment_id or function.deployment_name)
         return name.replace("-", "")
     elif isinstance(function, ComposioToolDefinition):
         return function.name
-    elif isinstance(function, MCPToolDefinition):
-        return function.name
     else:
         return snake_case(function.__name__)
+
+
+def get_mcp_tool_name(tool_def: MCPToolDefinition) -> str:
    return f"{tool_def.server.name}__{tool_def.name}"
vellum/workflows/types/core.py CHANGED
@@ -13,7 +13,7 @@ from typing import ( # type: ignore[attr-defined]
 )
 
 from vellum.client.core.pydantic_utilities import UniversalBaseModel
-from vellum.workflows.types.definition import ComposioToolDefinition, DeploymentDefinition, MCPToolDefinition
+from vellum.workflows.types.definition import ComposioToolDefinition, DeploymentDefinition, MCPServer
 
 if TYPE_CHECKING:
     from vellum.workflows.workflows.base import BaseWorkflow
@@ -41,6 +41,7 @@ class MergeBehavior(Enum):
     AWAIT_ALL = "AWAIT_ALL"
     AWAIT_ANY = "AWAIT_ANY"
     AWAIT_ATTRIBUTES = "AWAIT_ATTRIBUTES"
+    CUSTOM = "CUSTOM"
 
 
 class ConditionType(Enum):
@@ -50,4 +51,5 @@ class ConditionType(Enum):
 
 
 # Type alias for functions that can be called in tool calling nodes
-Tool = Union[Callable[..., Any], DeploymentDefinition, Type["BaseWorkflow"], ComposioToolDefinition, MCPToolDefinition]
+ToolBase = Union[Callable[..., Any], DeploymentDefinition, Type["BaseWorkflow"], ComposioToolDefinition]
+Tool = Union[ToolBase, MCPServer]
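A short sketch of what the widened Tool union allows a tool calling node to accept: a plain callable (or any other ToolBase member) alongside an MCPServer, which the node expands into one function per discovered MCP tool. The attribute names follow the tests elsewhere in this diff; the weather function, model name, URL, and token are illustrative placeholders:

    from vellum.workflows.constants import AuthorizationType
    from vellum.workflows.nodes.displayable.tool_calling_node.node import ToolCallingNode
    from vellum.workflows.types.definition import MCPServer


    def get_current_weather(location: str) -> str:
        """A plain callable is still a valid ToolBase entry."""
        return f"It is sunny in {location}."


    class MyToolCallingNode(ToolCallingNode):
        ml_model = "gpt-4o-mini"
        blocks = []
        functions = [
            get_current_weather,
            MCPServer(
                name="github",
                url="https://example.com/mcp",
                authorization_type=AuthorizationType.BEARER_TOKEN,
                bearer_token_value="<token>",
            ),
        ]
        max_prompt_iterations = 5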
vellum/workflows/types/definition.py CHANGED
@@ -116,6 +116,7 @@ class ComposioToolDefinition(UniversalBaseModel):
     parameters: Optional[Dict[str, Any]] = None
     version: Optional[str] = None
     tags: Optional[List[str]] = None
+    user_id: Optional[str] = None
 
     @property
     def name(self) -> str:
vellum/workflows/types/tests/test_definition.py CHANGED
@@ -39,7 +39,10 @@ def test_deployment_definition(deployment_value, expected_deployment_id, expecte
 def test_composio_tool_definition_creation():
     """Test that ComposioToolDefinition can be created with required fields."""
     composio_tool = ComposioToolDefinition(
-        toolkit="GITHUB", action="GITHUB_CREATE_AN_ISSUE", description="Create a new issue in a GitHub repository"
+        toolkit="GITHUB",
+        action="GITHUB_CREATE_AN_ISSUE",
+        description="Create a new issue in a GitHub repository",
+        user_id=None,
     )
 
     assert composio_tool.toolkit == "GITHUB"
{vellum_ai-1.0.8.dist-info → vellum_ai-1.0.10.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 1.0.8
+Version: 1.0.10
 Summary:
 License: MIT
 Requires-Python: >=3.9,<4.0
{vellum_ai-1.0.8.dist-info → vellum_ai-1.0.10.dist-info}/RECORD CHANGED
@@ -8,7 +8,7 @@ vellum_cli/init.py,sha256=WpnMXPItPmh0f0bBGIer3p-e5gu8DUGwSArT_FuoMEw,5093
 vellum_cli/logger.py,sha256=dcM_OmgqXLo93vDYswO5ylyUQQcTfnA5GTd5tbIt3wM,1446
 vellum_cli/ping.py,sha256=p_BCCRjgPhng6JktuECtkDQLbhopt6JpmrtGoLnLJT8,1161
 vellum_cli/pull.py,sha256=udYyPlJ6VKDdh78rApNJOZgxHl82fcV6iGnRPSdX1LY,14750
-vellum_cli/push.py,sha256=hzBBD7Rc-11Dyu6_JfHeLI03c4XKAYQZZoq1SCNyXpM,11547
+vellum_cli/push.py,sha256=5sGkWmwnuPI9mYlBFZO4Wc0WcErZAnFNXZYJByiAJGw,11620
 vellum_cli/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_cli/tests/conftest.py,sha256=wx3PlJjVB0HRf5dr2b_idOIw27WPPl0J0FNbhIJJaVk,1689
 vellum_cli/tests/test_config.py,sha256=uvKGDc8BoVyT9_H0Z-g8469zVxomn6Oi3Zj-vK7O_wU,2631
@@ -18,7 +18,7 @@ vellum_cli/tests/test_init.py,sha256=8UOc_ThfouR4ja5cCl_URuLk7ohr9JXfCnG4yka1OUQ
 vellum_cli/tests/test_main.py,sha256=qDZG-aQauPwBwM6A2DIu1494n47v3pL28XakTbLGZ-k,272
 vellum_cli/tests/test_ping.py,sha256=b3aQLd-N59_8w2rRiWqwpB1rlHaKEYVbAj1Y3hi7A-g,2605
 vellum_cli/tests/test_pull.py,sha256=hxMbW_j0weDDrkzVGpvLpFcwNQdn-fxTv4wBHeYizzc,49904
-vellum_cli/tests/test_push.py,sha256=H9ZU0_E-I1F98SBvj-I_1ooe3RzOlhMTTmEFg6CRrYY,40384
+vellum_cli/tests/test_push.py,sha256=9whdm-i4d3scJeJ7snGrkCVKcDlZsq8WqH_Nnv9KAYk,40395
 vellum_ee/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -94,7 +94,7 @@ vellum_ee/workflows/display/tests/workflow_serialization/test_basic_search_node_
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_subworkflow_deployment_serialization.py,sha256=XWrhHg_acLsRHwjstBAii9Pmes9oXFtAUWSAVF1oSBc,11225
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_templating_node_serialization.py,sha256=V8b6gKghLlO7PJI8xeNdnfn8aII0W_IFQvSQBQM62UQ,7721
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_terminal_node_serialization.py,sha256=hDWtKXmGI1CKhTwTNqpu_d5RkE5n7SolMLtgd87KqTI,3856
-vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py,sha256=gonapBCyDDt3qc7U02PCuKyPS8f3YiSAZ7QD86CH1Fw,3794
+vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py,sha256=sWKSqw1B4iAEamIzbRJBfjtMCy_D54gxEu3aPSDrS_o,3819
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py,sha256=4t1lkN2nsZF6lFqP6QnskUQWJlhasF8C2_f6atzk8ZY,26298
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py,sha256=B0rDsCvO24qPp0gkmj8SdTDY5CxZYkvKwknsKBuAPyA,10017
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_workflow_deployment_serialization.py,sha256=mova0sPD3evHiHIN1O0VynxlCp-uOcEIKve5Pd_oCDg,4069
@@ -145,7 +145,7 @@ vellum/client/README.md,sha256=Dle5iytCXxP1pNeNd7uZyhFo0rl7tp7vU7s8gmi10OQ,4863
 vellum/client/__init__.py,sha256=KmkyOgReuTsjmXF3WC_dPQ9QqJgYrB3Sr8_LcSUIQyI,125258
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=-AFplaIvuzgdfFXfodBM9oR_i0p48jofC_DkscIsYRE,2383
+vellum/client/core/client_wrapper.py,sha256=PYKT4GSWVfnUCdamESx8hYo2KF9SVuaYQAHwwHxyYSI,2385
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 vellum/client/core/http_client.py,sha256=cKs2w0ybDBk1wHQf-fTALm_MmvaMe3cZKcYJxqmCxkE,19539
@@ -210,7 +210,7 @@ vellum/client/resources/workflow_sandboxes/client.py,sha256=tpIjyuvVAnghmUu_gI9l
 vellum/client/resources/workflow_sandboxes/types/__init__.py,sha256=EaGVRU1w6kJiiHrbZOeEa0c3ggjfgv_jBqsyOkCRWOI,212
 vellum/client/resources/workflow_sandboxes/types/list_workflow_sandbox_examples_request_tag.py,sha256=TEwWit20W3X-zWPPLAhmUG05UudG9gaBSJ4Q4-rNJws,188
 vellum/client/resources/workflows/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-vellum/client/resources/workflows/client.py,sha256=a3DwkUn8FBve-Wr139MSms18EPZPdnTkDsMqyTUQEYc,10725
+vellum/client/resources/workflows/client.py,sha256=rU5fJmVebWO6SoK5S9RzJHM0-hucRKnDt24tvG-DwEM,11037
 vellum/client/resources/workspace_secrets/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/client/resources/workspace_secrets/client.py,sha256=l1FOj0f-IB5_oQ7iWiHopFK3lDXBqiaIc9g10W9PHFU,8381
 vellum/client/resources/workspaces/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -295,7 +295,7 @@ vellum/client/types/deployment_release_tag_read.py,sha256=dUrTOz9LH1gAvC_ktMB_7N
 vellum/client/types/docker_service_token.py,sha256=T0icNHBKsIs6TrEiDRjckM_f37hcF1DMwEE8161tTvY,614
 vellum/client/types/document_chat_message_content.py,sha256=MiVYuMKtRUaT6_ve1MzihChb10SrOt_0VhpCB0x7hFQ,745
 vellum/client/types/document_chat_message_content_request.py,sha256=wMzj1SREQUeiSqZhOoLOOQzn6hmO_GDOMICDQ4fEXzs,774
-vellum/client/types/document_document_to_document_index.py,sha256=IDfBoch4lYL-J1XlUNSAtV7elJ6l5jX5peboYsriGjQ,1580
+vellum/client/types/document_document_to_document_index.py,sha256=Yeb84grNv-T9nI2oFjMwioPHUEH2TLREaqhldWMYwhs,1609
 vellum/client/types/document_index_chunking.py,sha256=TU0Y7z0Xacm3dhzEDuDIG3ZKJCu3vNURRh3PqEd17mY,356
 vellum/client/types/document_index_chunking_request.py,sha256=g9BKCsHKg5kzjG7YYeMNQ_5R8TXLeSgumJlMXoSfBcs,435
 vellum/client/types/document_index_indexing_config.py,sha256=xL1pCzUOkw5sSie1OrBpasE3bVnv0UyZBn7uZztbhbs,781
@@ -500,7 +500,7 @@ vellum/client/types/open_ai_vectorizer_text_embedding_3_small.py,sha256=T_-P7qGj
 vellum/client/types/open_ai_vectorizer_text_embedding_3_small_request.py,sha256=-lwNeWj7ExP-JLncUp1Uyd20FxweVIDu-aEnenPB98A,841
 vellum/client/types/open_ai_vectorizer_text_embedding_ada_002.py,sha256=c4vNlR6lRvUjq-67M06sroDMNMG_qC4JUBqwmKEJQ2I,812
 vellum/client/types/open_ai_vectorizer_text_embedding_ada_002_request.py,sha256=FdpkkNBGgRwfqFjBwpfH4t2zKIM0pIYminX2iZQUzvY,841
-vellum/client/types/organization_read.py,sha256=QDFpX4pZCjGSRXZ6FF65SDzRxFqkI87DEEUXtaVoTAs,837
+vellum/client/types/organization_read.py,sha256=AMTjqWH6CWXzbPpjoOE9Usb0lsqepC_vBfqKjLxHcik,908
 vellum/client/types/paginated_container_image_read_list.py,sha256=7lwIgs1q7Z0xDYPGWPnjSNC1kU_peu79CotzaaQfRdA,801
 vellum/client/types/paginated_deployment_release_tag_read_list.py,sha256=hp7D74CxPY14dEPRZ-fnTCwp63upxkYquL1e74oYXh4,826
 vellum/client/types/paginated_document_index_read_list.py,sha256=bO7pm3KCZi5LDO17YXgr_lUF9SRdAfMu6wOutX91ANw,797
@@ -589,7 +589,7 @@ vellum/client/types/sentence_chunking.py,sha256=guqU3072X4h8Laf6LhTWQ5lpjBpTgoXR
 vellum/client/types/sentence_chunking_request.py,sha256=77gv1fVc9IaTuGGx3O1HB0LF9sXM5pSTWksl8BEmvLU,812
 vellum/client/types/slim_deployment_read.py,sha256=Gr3wWT1eJKe33MVsW0O0pSIgUF_LE6WFbvFbf5_CVb8,1654
 vellum/client/types/slim_document.py,sha256=HJiymYPvRxfxhBUkD8epW0hQ2Vt9PQtv398QsRb4DsI,2395
-vellum/client/types/slim_document_document_to_document_index.py,sha256=IG8gzxWO32fzjd6bQn5ZyBoIlAGpkx1RNgjsFaWUXSY,1522
+vellum/client/types/slim_document_document_to_document_index.py,sha256=0LThZhOgIE2kSmIWH9s59_6pEzGRWpx5qS5BBuWXdLk,1551
 vellum/client/types/slim_release_review.py,sha256=7DXmD1AVa_Wj7e0qiR7GUN9cSqwkk1JloYmp_3oluQQ,783
 vellum/client/types/slim_workflow_deployment.py,sha256=Js-ycMFZ8-kNFPsd4bZew9nI_eN2M_58LzDHeCjkfTg,2009
 vellum/client/types/slim_workflow_execution_read.py,sha256=Opm1HTYVMz_D2USQCB-5ZoJ4EjKKfrDhoXc0hETldVM,1936
@@ -844,7 +844,7 @@ vellum/evaluations/utils/env.py,sha256=Xj_nxsoU5ox06EOTjRopR4lrigQI6Le6qbWGltYoE
 vellum/evaluations/utils/exceptions.py,sha256=dXMAkzqbHV_AP5FjjbegPlfUE0zQDlpA3qOsoOJUxfg,49
 vellum/evaluations/utils/paginator.py,sha256=rEED_BJAXAM6tM1yMwHePNzszjq_tTq4NbQvi1jWQ_Q,697
 vellum/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/plugins/pydantic.py,sha256=GmNsxupKskbqpe4N5NBmSnLo680EATqhXJHABgf1NO0,3727
+vellum/plugins/pydantic.py,sha256=SamPlRZ8V9kuxEfMkOPKjhMMLa5Q3wYJ3Z-F_IfKtvA,3911
 vellum/plugins/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/plugins/tests/test_pydantic.py,sha256=S6bLqs3Y5DGA012QV_7f6hk4e6Bz-iJ9py9DEKuo4fM,746
 vellum/plugins/utils.py,sha256=cPmxE9R2CK1bki2jKE8rB-G9zMf2pzHjSPDHFPXwd3Q,878
@@ -1536,8 +1536,9 @@ vellum/workflows/descriptors/tests/test_utils.py,sha256=HJ5DoRz0sJvViGxyZ_FtytZj
 vellum/workflows/descriptors/utils.py,sha256=1siECBf6AI54gwwUwkF6mP9rYsRryUGaOYBbMpQaceM,3848
 vellum/workflows/edges/__init__.py,sha256=wSkmAnz9xyi4vZwtDbKxwlplt2skD7n3NsxkvR_pUus,50
 vellum/workflows/edges/edge.py,sha256=N0SnY3gKVuxImPAdCbPMPlHJIXbkQ3fwq_LbJRvVMFc,677
-vellum/workflows/emitters/__init__.py,sha256=YyOgaoLtVW8eFNEWODzCYb0HzL0PoSeNRf4diJ1Y0dk,80
-vellum/workflows/emitters/base.py,sha256=D5SADKIvnbgKwIBgYm77jaqvpo1o0rz4MmuX_muRqQU,359
+vellum/workflows/emitters/__init__.py,sha256=d9QFOI3eVg6rzpSFLvrjkDYXWikf1tcp3ruTRa2Boyc,143
+vellum/workflows/emitters/base.py,sha256=Tcp13VMB-GMwEJdl-6XTPckspdOdwpMgBx22-PcQxds,892
+vellum/workflows/emitters/vellum_emitter.py,sha256=VRJgyEs6RnikwlPBUu1s7BD8flVeuM3QgTeQLUnaDuE,5051
 vellum/workflows/environment/__init__.py,sha256=TJz0m9dwIs6YOwCTeuN0HHsU-ecyjc1OJXx4AFy83EQ,121
 vellum/workflows/environment/environment.py,sha256=Ck3RPKXJvtMGx_toqYQQQF-ZwXm5ijVwJpEPTeIJ4_Q,471
 vellum/workflows/errors/__init__.py,sha256=tWGPu5xyAU8gRb8_bl0fL7OfU3wxQ9UH6qVwy4X4P_Q,113
@@ -1597,7 +1598,8 @@ vellum/workflows/inputs/base.py,sha256=w3owT5B3rLBmIj-v-jL2l-HD4yd3hXK9RmHVd557B
 vellum/workflows/inputs/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/inputs/tests/test_inputs.py,sha256=lioA8917mFLYq7Ml69UNkqUjcWbbxkxnpIEJ4FBaYBk,2206
 vellum/workflows/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/integrations/composio_service.py,sha256=p3V9l7OlOV1zZK9aIgvfhLiDsxbxeSVZJ3MBSedo7Pw,5008
+vellum/workflows/integrations/composio_service.py,sha256=v1rVQXTh1rnupguj8oIM20V7bSKaJiAoJ5yjz2NeKA8,5906
+vellum/workflows/integrations/mcp_service.py,sha256=SaOLg76JBAiBDAMUn04mxVWmf2Btobd1kDjc8B1atng,8712
 vellum/workflows/logging.py,sha256=_a217XogktV4Ncz6xKFz7WfYmZAzkfVRVuC0rWob8ls,437
 vellum/workflows/nodes/__init__.py,sha256=aVdQVv7Y3Ro3JlqXGpxwaU2zrI06plDHD2aumH5WUIs,1157
 vellum/workflows/nodes/bases/__init__.py,sha256=cniHuz_RXdJ4TQgD8CBzoiKDiPxg62ErdVpCbWICX64,58
@@ -1695,13 +1697,13 @@ vellum/workflows/nodes/displayable/tests/test_search_node_error_handling.py,sha2
 vellum/workflows/nodes/displayable/tests/test_search_node_wth_text_output.py,sha256=VepO5z1277c1y5N6LLIC31nnWD1aak2m5oPFplfJHHs,6935
 vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha256=dc3EEn1sOICpr3GdS8eyeFtExaGwWWcw9eHSdkRhQJU,2584
 vellum/workflows/nodes/displayable/tool_calling_node/__init__.py,sha256=3n0-ysmFKsr40CVxPthc0rfJgqVJeZuUEsCmYudLVRg,117
-vellum/workflows/nodes/displayable/tool_calling_node/node.py,sha256=KRI1NMgXZTUgQqq9uOA9W_D8k8sy7ZAq6v53-YVno1k,6545
+vellum/workflows/nodes/displayable/tool_calling_node/node.py,sha256=ftPf7hmPvk_rJeIoxnJGkTLex-kDW1CRuvVDUwUdMxg,7283
 vellum/workflows/nodes/displayable/tool_calling_node/state.py,sha256=oQg_GAtc349nPB5BL_oeDYYD7q1qSDPAqjj8iA8OoAw,215
 vellum/workflows/nodes/displayable/tool_calling_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/displayable/tool_calling_node/tests/test_composio_service.py,sha256=UV0vZpU7-_tHcwnIq36WKwHrJXNurU4bdC3rfaw8eoU,4804
-vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=raY_E5-EgtYNXEPbO2I-Ythe4YeuFdGsXGZ_BAN98uI,7979
-vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py,sha256=prl8GRwSBOgIorjBJkaYrp6XJjXNuaoedg3Lxt269j0,8303
-vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=rj9UTNDOi3sx_Rbc4DfBOtBpMTHut9piwlALN9n0zE0,16706
+vellum/workflows/nodes/displayable/tool_calling_node/tests/test_composio_service.py,sha256=y7KAqbiJHoya6N5EWv1qgz0htM_Yzz7zjAHVp78IMFo,6919
+vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=TPafJhCAV6oLg5kTttQw0hL56ct3a2Xatvnld6dK8CY,8628
+vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py,sha256=om4FztVQ33jFZK_lbusi6khOM7zgzNCHlUcEb5-r6pU,8361
+vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=fvy0O3YpibWUpw4aLKnk8PdwlRCJC7Z2acjryOTiuxY,19728
 vellum/workflows/nodes/experimental/README.md,sha256=eF6DfIL8t-HbF9-mcofOMymKrraiBHDLKTlnBa51ZiE,284
 vellum/workflows/nodes/experimental/__init__.py,sha256=jCQgvZEknXKfuNhGSOou4XPfrPqZ1_XBj5F0n0fgiWM,106
 vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
@@ -1748,12 +1750,12 @@ vellum/workflows/tests/test_sandbox.py,sha256=JKwaluI-lODQo7Ek9sjDstjL_WTdSqUlVi
 vellum/workflows/tests/test_undefined.py,sha256=zMCVliCXVNLrlC6hEGyOWDnQADJ2g83yc5FIM33zuo8,353
 vellum/workflows/types/__init__.py,sha256=KxUTMBGzuRCfiMqzzsykOeVvrrkaZmTTo1a7SLu8gRM,68
 vellum/workflows/types/code_execution_node_wrappers.py,sha256=3MNIoFZKzVzNS5qFLVuDwMV17QJw72zo7NRf52yMq5A,3074
-vellum/workflows/types/core.py,sha256=Vykj9o6fEnS13M1LwJDh9FVgua03acqigBqyYOiJiq8,1352
-vellum/workflows/types/definition.py,sha256=pK0fAXHw7C0AFpCoM4WGe1_MD-usupF4-m6ldo5AQXY,4568
+vellum/workflows/types/core.py,sha256=TggDVs2lVya33xvu374EDhMC1b7RRlAAs0zWLaF46BA,1385
+vellum/workflows/types/definition.py,sha256=fzWfsfbXLS4sZvjOQMSDoiuSaFo4Ii2kC8AOiPade9o,4602
 vellum/workflows/types/generics.py,sha256=8jptbEx1fnJV0Lhj0MpCJOT6yNiEWeTOYOwrEAb5CRU,1576
 vellum/workflows/types/stack.py,sha256=h7NE0vXR7l9DevFBIzIAk1Zh59K-kECQtDTKOUunwMY,1314
 vellum/workflows/types/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/types/tests/test_definition.py,sha256=RsDoicu8A1dqJOGa-Ok866K8lnzn5L0Hez3lQijYD4c,5011
+vellum/workflows/types/tests/test_definition.py,sha256=4Qqlf7GpoG9MrLuMCkcRzEZMgwrr7du4DROcB1xfv0E,5050
 vellum/workflows/types/tests/test_utils.py,sha256=UnZog59tR577mVwqZRqqWn2fScoOU1H6up0EzS8zYhw,2536
 vellum/workflows/types/utils.py,sha256=mTctHITBybpt4855x32oCKALBEcMNLn-9cCmfEKgJHQ,6498
 vellum/workflows/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1774,8 +1776,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
 vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/workflows/tests/test_base_workflow.py,sha256=ptMntHzVyy8ZuzNgeTuk7hREgKQ5UBdgq8VJFSGaW4Y,20832
 vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
-vellum_ai-1.0.8.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
-vellum_ai-1.0.8.dist-info/METADATA,sha256=qMK12xNOQpaw7B4-wciOlrjn7mmS4zzPS9ALMXFIb_c,5554
-vellum_ai-1.0.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-vellum_ai-1.0.8.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
-vellum_ai-1.0.8.dist-info/RECORD,,
+vellum_ai-1.0.10.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-1.0.10.dist-info/METADATA,sha256=j-b34bQSbPMhl5YG7dLuwC4_EZYCtRrdhoTxjXYJ3-U,5555
+vellum_ai-1.0.10.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-1.0.10.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-1.0.10.dist-info/RECORD,,
vellum_cli/push.py CHANGED
@@ -67,7 +67,10 @@ def push_command(
         raise ValueError(f"No workflow config for '{module}' found in project to push.")
 
     if len(workflow_configs) > 1:
-        raise ValueError("Multiple workflows found in project to push. Pushing only a single workflow is supported.")
+        raise ValueError(
+            "Multiple workflows found. Please specify a single workflow to push. "
+            f"Found: {', '.join([w.module for w in workflow_configs])}"
+        )
 
     workflow_config = workflow_configs[0]
 
vellum_cli/tests/test_push.py CHANGED
@@ -77,9 +77,8 @@ def test_push__multiple_workflows_configured__no_module_specified(mock_module):
     # THEN it should fail
     assert result.exit_code == 1
     assert result.exception
-    assert (
-        str(result.exception)
-        == "Multiple workflows found in project to push. Pushing only a single workflow is supported."
+    assert str(result.exception) == (
+        "Multiple workflows found. Please specify a single workflow to push. Found: examples.mock, examples.mock2"
     )
 
 
@@ -349,9 +348,8 @@ def test_push__workflow_sandbox_option__existing_id_different_module(mock_module
     # THEN it should fail
     assert result.exit_code == 1
     assert result.exception
-    assert (
-        str(result.exception)
-        == "Multiple workflows found in project to push. Pushing only a single workflow is supported."
+    assert str(result.exception) == (
+        f"Multiple workflows found. Please specify a single workflow to push. Found: {module}, {second_module}"
     )
 
 
vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py CHANGED
@@ -57,6 +57,7 @@ def test_serialize_workflow():
         "parameters": None,
         "version": None,
         "tags": None,
+        "user_id": None,
     }
 
     # AND the rest of the node structure should be correct