vellum-ai 1.7.10__py3-none-any.whl → 1.7.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (31)
  1. vellum/client/core/client_wrapper.py +2 -2
  2. vellum/workflows/events/tests/test_event.py +1 -0
  3. vellum/workflows/events/workflow.py +3 -0
  4. vellum/workflows/exceptions.py +3 -0
  5. vellum/workflows/integrations/mcp_service.py +7 -0
  6. vellum/workflows/integrations/tests/test_mcp_service.py +48 -0
  7. vellum/workflows/loaders/__init__.py +3 -0
  8. vellum/workflows/loaders/base.py +21 -0
  9. vellum/workflows/tests/triggers/test_vellum_integration_trigger.py +225 -0
  10. vellum/workflows/triggers/__init__.py +2 -1
  11. vellum/workflows/triggers/vellum_integration.py +383 -0
  12. vellum/workflows/types/__init__.py +3 -0
  13. vellum/workflows/types/tests/test_utils.py +11 -0
  14. vellum/workflows/types/trigger_exec_config.py +63 -0
  15. vellum/workflows/types/utils.py +22 -0
  16. vellum/workflows/utils/names.py +20 -0
  17. vellum/workflows/workflows/base.py +13 -1
  18. {vellum_ai-1.7.10.dist-info → vellum_ai-1.7.11.dist-info}/METADATA +1 -1
  19. {vellum_ai-1.7.10.dist-info → vellum_ai-1.7.11.dist-info}/RECORD +31 -25
  20. vellum_cli/pull.py +6 -5
  21. vellum_cli/push.py +35 -2
  22. vellum_cli/tests/test_push.py +122 -0
  23. vellum_ee/workflows/display/tests/workflow_serialization/test_list_vellum_document_serialization.py +65 -0
  24. vellum_ee/workflows/display/utils/events.py +6 -3
  25. vellum_ee/workflows/display/utils/tests/test_events.py +29 -0
  26. vellum_ee/workflows/server/virtual_file_loader.py +15 -4
  27. vellum_ee/workflows/tests/test_serialize_module.py +48 -0
  28. vellum_ee/workflows/tests/test_server.py +105 -0
  29. {vellum_ai-1.7.10.dist-info → vellum_ai-1.7.11.dist-info}/LICENSE +0 -0
  30. {vellum_ai-1.7.10.dist-info → vellum_ai-1.7.11.dist-info}/WHEEL +0 -0
  31. {vellum_ai-1.7.10.dist-info → vellum_ai-1.7.11.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py
@@ -27,10 +27,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "vellum-ai/1.7.10",
+            "User-Agent": "vellum-ai/1.7.11",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "1.7.10",
+            "X-Fern-SDK-Version": "1.7.11",
             **(self.get_custom_headers() or {}),
         }
         if self._api_version is not None:
vellum/workflows/events/tests/test_event.py
@@ -93,6 +93,7 @@ mock_node_uuid = str(uuid4_from_hash(MockNode.__qualname__))
             "display_context": None,
             "initial_state": None,
             "workflow_version_exec_config": None,
+            "server_metadata": None,
         },
         "parent": None,
         "links": None,
vellum/workflows/events/workflow.py
@@ -83,6 +83,9 @@ class WorkflowExecutionInitiatedBody(_BaseWorkflowExecutionBody, Generic[InputsT
     # This field will be populated during serialization by the serialize_body method
     workflow_version_exec_config: Optional[Any] = None
 
+    # This field can be populated with arbitrary server metadata during event enrichment
+    server_metadata: Optional[Dict[str, Any]] = None
+
     @field_serializer("inputs")
     def serialize_inputs(self, inputs: InputsType, _info: Any) -> Dict[str, Any]:
         return default_serializer(inputs)
vellum/workflows/exceptions.py
@@ -58,10 +58,12 @@ class WorkflowInitializationException(Exception):
         message: str,
         workflow_definition: Optional[Type["BaseWorkflow"]] = None,
         code: WorkflowErrorCode = WorkflowErrorCode.INVALID_INPUTS,
+        raw_data: Optional[Dict[str, Any]] = None,
     ):
 
         self.message = message
         self.code = code
+        self.raw_data = raw_data
         self.definition = workflow_definition if workflow_definition is not None else import_workflow_class()
         super().__init__(message)
 
@@ -70,4 +72,5 @@ class WorkflowInitializationException(Exception):
         return WorkflowError(
             message=self.message,
             code=self.code,
+            raw_data=self.raw_data,
         )
vellum/workflows/integrations/mcp_service.py
@@ -1,6 +1,7 @@
 import asyncio
 import json
 import logging
+import traceback
 from typing import Any, Dict, List, Optional
 
 import httpx
@@ -226,6 +227,8 @@ class MCPService:
             raise NodeException(
                 message=f"Error executing MCP operation '{operation}': {str(e)}",
                 code=WorkflowErrorCode.NODE_EXECUTION,
+                stacktrace=traceback.format_exc(),
+                raw_data={"operation": operation, "error_type": type(e).__name__, "error_message": str(e)},
             )
 
     def list_tools(self, server: MCPServer) -> List[Dict[str, Any]]:
@@ -249,11 +252,15 @@
                 )
             )
             return result
+        except NodeException:
+            raise
        except Exception as e:
            logger.error(f"Error executing MCP tool '{tool_def.name}': {e}")
            raise NodeException(
                message=f"Error executing MCP tool '{tool_def.name}': {str(e)}",
                code=WorkflowErrorCode.NODE_EXECUTION,
+               stacktrace=traceback.format_exc(),
+               raw_data={"tool_name": tool_def.name, "error_type": type(e).__name__, "error_message": str(e)},
            )
 
    def hydrate_tool_definitions(self, server_def: MCPServer) -> List[MCPToolDefinition]:
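The stacktrace and raw_data that MCPService now attaches travel on the NodeException itself, so calling code can log or surface them directly. A minimal consumer-side sketch (not part of the package; only the keyword arguments and attributes exercised in this diff are assumed, and the failing division stands in for a failing MCP call):

import traceback

from vellum.workflows.errors.types import WorkflowErrorCode
from vellum.workflows.exceptions import NodeException

try:
    try:
        1 / 0  # stand-in for a failing MCP call
    except Exception as exc:
        raise NodeException(
            message=f"Error executing MCP tool 'example': {exc}",
            code=WorkflowErrorCode.NODE_EXECUTION,
            stacktrace=traceback.format_exc(),
            raw_data={"tool_name": "example", "error_type": type(exc).__name__, "error_message": str(exc)},
        )
except NodeException as node_exc:
    # The enriched fields ride along with the exception for logging and observability.
    print(node_exc.code, node_exc.raw_data)
    print(node_exc.stacktrace)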
vellum/workflows/integrations/tests/test_mcp_service.py
@@ -4,6 +4,8 @@ import json
 from unittest import mock
 
 from vellum.workflows.constants import AuthorizationType
+from vellum.workflows.errors.types import WorkflowErrorCode
+from vellum.workflows.exceptions import NodeException
 from vellum.workflows.integrations.mcp_service import MCPHttpClient, MCPService
 from vellum.workflows.types.definition import MCPServer, MCPToolDefinition
 
@@ -223,3 +225,49 @@ def test_mcp_service_list_tools_handles_errors():
 
     # THEN we should get an empty list instead of crashing
     assert tools == []
+
+
+def test_mcp_service_call_tool_includes_stacktrace_and_raw_data_on_error():
+    """
+    Tests that NodeException contains stacktrace and raw_data when call_tool errors.
+    """
+    # GIVEN an MCP server configuration
+    sample_mcp_server = MCPServer(
+        name="test-server",
+        url="https://test.mcp.server.com/mcp",
+        authorization_type=AuthorizationType.BEARER_TOKEN,
+        bearer_token_value="test-token",
+    )
+
+    tool_def = MCPToolDefinition(
+        name="test-tool",
+        server=sample_mcp_server,
+        description="A test tool",
+        parameters={},
+    )
+
+    # AND a mock httpx client that raises an exception during tool execution
+    with mock.patch("vellum.workflows.integrations.mcp_service.httpx.AsyncClient") as mock_client_class:
+        mock_client = mock.AsyncMock()
+        mock_client_class.return_value = mock_client
+        mock_client.post.side_effect = RuntimeError("Tool execution failed")
+
+        # WHEN we try to execute the tool
+        service = MCPService()
+
+        with pytest.raises(NodeException) as exc_info:
+            service.execute_tool(tool_def, {"arg": "value"})
+
+        # THEN the exception should have the correct error code
+        assert exc_info.value.code == WorkflowErrorCode.NODE_EXECUTION
+
+        # AND the exception should have a stacktrace
+        assert exc_info.value.stacktrace is not None
+        assert len(exc_info.value.stacktrace) > 0
+        assert "RuntimeError: Tool execution failed" in exc_info.value.stacktrace
+
+        # AND the exception should have raw_data with operation details
+        assert exc_info.value.raw_data is not None
+        assert exc_info.value.raw_data["operation"] == "call_tool"
+        assert exc_info.value.raw_data["error_type"] == "RuntimeError"
+        assert exc_info.value.raw_data["error_message"] == "Tool execution failed"
vellum/workflows/loaders/__init__.py
@@ -0,0 +1,3 @@
+from vellum.workflows.loaders.base import BaseWorkflowFinder
+
+__all__ = ["BaseWorkflowFinder"]
vellum/workflows/loaders/base.py
@@ -0,0 +1,21 @@
+from abc import ABC, abstractmethod
+import importlib.abc
+
+
+class BaseWorkflowFinder(importlib.abc.MetaPathFinder, ABC):
+    """
+    Abstract base class for workflow finders that support custom error message formatting.
+    """
+
+    @abstractmethod
+    def format_error_message(self, error_message: str) -> str:
+        """
+        Format an error message to be more user-friendly.
+
+        Args:
+            error_message: The original error message
+
+        Returns:
+            The formatted error message
+        """
+        pass
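BaseWorkflowFinder adds a single contract, format_error_message, on top of importlib's MetaPathFinder, so a subclass only needs to supply that method to be instantiable. A hypothetical sketch of such a subclass (the finder name, prefixing behavior, and the pass-through find_spec are illustrative, not from the package):

from importlib.machinery import ModuleSpec
from typing import Optional, Sequence

from vellum.workflows.loaders import BaseWorkflowFinder


class PrefixingWorkflowFinder(BaseWorkflowFinder):
    """Example finder that only customizes error message formatting."""

    def __init__(self, prefix: str) -> None:
        self._prefix = prefix

    def find_spec(self, fullname: str, path: Optional[Sequence[str]] = None, target=None) -> Optional[ModuleSpec]:
        # This example does not locate modules itself; it defers to the rest of sys.meta_path.
        return None

    def format_error_message(self, error_message: str) -> str:
        # Prepend a human-friendly prefix to whatever the loader reports.
        return f"{self._prefix}: {error_message}"


finder = PrefixingWorkflowFinder("Workflow import failed")
print(finder.format_error_message("No module named 'my_workflow.nodes'"))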
vellum/workflows/tests/triggers/test_vellum_integration_trigger.py
@@ -0,0 +1,225 @@
+"""Tests for VellumIntegrationTrigger factory pattern and behavior."""
+
+import pytest
+
+from vellum.workflows.constants import VellumIntegrationProviderType
+from vellum.workflows.references.trigger import TriggerAttributeReference
+from vellum.workflows.triggers.vellum_integration import VellumIntegrationTrigger
+
+
+def test_factory_creates_trigger_class() -> None:
+    """Factory method creates a trigger class with correct attributes."""
+    SlackNewMessage = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+        attributes={"channel": "C123456"},
+    )
+
+    assert issubclass(SlackNewMessage, VellumIntegrationTrigger)
+    assert SlackNewMessage.provider == VellumIntegrationProviderType.COMPOSIO
+    assert SlackNewMessage.integration_name == "SLACK"
+    assert SlackNewMessage.slug == "slack_new_message"
+    assert SlackNewMessage.trigger_nano_id == "test_nano_123"
+    assert SlackNewMessage.attributes == {"channel": "C123456"}
+
+
+def test_factory_caches_trigger_classes() -> None:
+    """Factory returns the same class instance for identical parameters (ensures deterministic UUIDs)."""
+    SlackNewMessage1 = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+    SlackNewMessage2 = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+
+    assert SlackNewMessage1 is SlackNewMessage2
+
+
+def test_populates_dynamic_attributes() -> None:
+    """Trigger dynamically populates attributes from event_data keys."""
+    GithubPush = VellumIntegrationTrigger.for_trigger(
+        integration_name="GITHUB",
+        slug="github_push_event",
+        trigger_nano_id="test_nano_456",
+    )
+
+    event_data = {
+        "repository": "vellum-ai/workflows",
+        "branch": "main",
+        "commits": ["abc123", "def456"],
+    }
+
+    trigger = GithubPush(event_data=event_data)
+
+    assert getattr(trigger, "repository") == "vellum-ai/workflows"
+    assert getattr(trigger, "branch") == "main"
+    assert getattr(trigger, "commits") == ["abc123", "def456"]
+
+
+def test_supports_attribute_references() -> None:
+    """Metaclass creates TriggerAttributeReference dynamically on attribute access."""
+    SlackNewMessage = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+
+    # Metaclass __getattribute__ creates references for undefined attributes
+    message_ref = SlackNewMessage.message
+    channel_ref = SlackNewMessage.channel
+
+    assert isinstance(message_ref, TriggerAttributeReference)
+    assert isinstance(channel_ref, TriggerAttributeReference)
+    assert message_ref.name == "message"
+    assert channel_ref.name == "channel"
+
+
+def test_to_trigger_attribute_values() -> None:
+    """to_trigger_attribute_values returns correct attribute mappings."""
+    SlackNewMessage = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+
+    event_data = {"message": "Hello", "channel": "C123"}
+    trigger = SlackNewMessage(event_data=event_data)
+
+    attr_values = trigger.to_trigger_attribute_values()
+
+    assert len(attr_values) == 2
+    for key in attr_values.keys():
+        assert isinstance(key, TriggerAttributeReference)
+    assert set(attr_values.values()) == {"Hello", "C123"}
+
+
+def test_trigger_attribute_id_stability() -> None:
+    """Trigger attribute IDs must be stable across factory calls."""
+    Slack1 = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+    Slack2 = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+
+    # Factory caching ensures same class
+    assert Slack1 is Slack2
+
+    # Attribute references must be deterministic across accesses
+    msg_ref_1 = Slack1.message
+    msg_ref_2 = Slack2.message
+
+    # Same trigger class + same attribute name = same reference
+    assert msg_ref_1 == msg_ref_2
+
+
+def test_to_exec_config() -> None:
+    """to_exec_config() produces valid ComposioIntegrationTriggerExecConfig."""
+    SlackMessage = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="abc123def456",
+        attributes={"channel": "C123456"},
+    )
+
+    exec_config = SlackMessage.to_exec_config()
+
+    assert exec_config.type == "COMPOSIO_INTEGRATION_TRIGGER"
+    assert exec_config.provider == VellumIntegrationProviderType.COMPOSIO
+    assert exec_config.integration_name == "SLACK"
+    assert exec_config.slug == "slack_new_message"
+    assert exec_config.trigger_nano_id == "abc123def456"
+    assert exec_config.attributes == {"channel": "C123456"}
+
+
+def test_to_exec_config_base_class_fails() -> None:
+    """to_exec_config() raises error on base class."""
+    with pytest.raises(AttributeError, match="factory-generated trigger classes"):
+        VellumIntegrationTrigger.to_exec_config()
+
+
+def test_empty_event_data() -> None:
+    """Trigger handles empty event data gracefully."""
+    SlackMessage = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+    trigger = SlackMessage(event_data={})
+
+    attr_values = trigger.to_trigger_attribute_values()
+    assert attr_values == {}
+
+
+def test_attribute_name_does_not_conflict_with_class_variables() -> None:
+    """Event data attributes don't conflict with class variables."""
+    SlackMessage = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+    )
+
+    # Event data with key that matches a class variable name
+    event_data = {"provider": "some_value", "message": "Hello"}
+    trigger = SlackMessage(event_data=event_data)
+
+    # Instance attribute should be set from event_data
+    assert trigger.provider == "some_value"  # Instance attr from event_data
+    # But class variable should be unchanged
+    assert SlackMessage.provider == VellumIntegrationProviderType.COMPOSIO
+
+
+def test_non_json_serializable_attributes_fail_fast() -> None:
+    """Non-JSON-serializable attributes raise ValueError with clear message."""
+
+    # Custom objects are not JSON-serializable
+    class CustomObject:
+        pass
+
+    with pytest.raises(ValueError, match="must be JSON-serializable"):
+        VellumIntegrationTrigger.for_trigger(
+            integration_name="SLACK",
+            slug="slack_new_message",
+            trigger_nano_id="test_nano_123",
+            attributes={"custom": CustomObject()},
+        )
+
+    # Sets are not JSON-serializable
+    with pytest.raises(ValueError, match="must be JSON-serializable"):
+        VellumIntegrationTrigger.for_trigger(
+            integration_name="SLACK",
+            slug="slack_new_message",
+            trigger_nano_id="test_nano_123",
+            attributes={"tags": {"a", "b", "c"}},
+        )
+
+
+def test_nested_json_serializable_attributes_work() -> None:
+    """Nested JSON-serializable attributes work correctly."""
+    SlackMessage = VellumIntegrationTrigger.for_trigger(
+        integration_name="SLACK",
+        slug="slack_new_message",
+        trigger_nano_id="test_nano_123",
+        attributes={
+            "channel": "C123456",
+            "filters": {"status": "active", "priority": ["high", "medium"]},
+            "count": 42,
+            "enabled": True,
+        },
+    )
+
+    assert SlackMessage.attributes == {
+        "channel": "C123456",
+        "filters": {"status": "active", "priority": ["high", "medium"]},
+        "count": 42,
+        "enabled": True,
+    }
vellum/workflows/triggers/__init__.py
@@ -2,5 +2,6 @@ from vellum.workflows.triggers.base import BaseTrigger
 from vellum.workflows.triggers.integration import IntegrationTrigger
 from vellum.workflows.triggers.manual import ManualTrigger
 from vellum.workflows.triggers.slack import SlackTrigger
+from vellum.workflows.triggers.vellum_integration import VellumIntegrationTrigger
 
-__all__ = ["BaseTrigger", "IntegrationTrigger", "ManualTrigger", "SlackTrigger"]
+__all__ = ["BaseTrigger", "IntegrationTrigger", "ManualTrigger", "SlackTrigger", "VellumIntegrationTrigger"]
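With this export, factory-generated triggers can be built straight from the public vellum.workflows.triggers module. A brief usage sketch based on the tests above (the SLACK/slug/nano-id values are the same placeholders the tests use, and only methods exercised in this diff are assumed):

from vellum.workflows.triggers import VellumIntegrationTrigger

# The factory returns a cached subclass keyed on these parameters.
SlackNewMessage = VellumIntegrationTrigger.for_trigger(
    integration_name="SLACK",
    slug="slack_new_message",
    trigger_nano_id="test_nano_123",
    attributes={"channel": "C123456"},
)

# Attribute references are created on access and stay stable across factory calls.
message_ref = SlackNewMessage.message

# The exec config captures the trigger's provider, integration, slug, and attributes.
exec_config = SlackNewMessage.to_exec_config()
print(message_ref.name, exec_config.type)  # message COMPOSIO_INTEGRATION_TRIGGER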