vellum-ai 1.7.5__py3-none-any.whl → 1.7.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. vellum/client/core/client_wrapper.py +2 -2
  2. vellum/workflows/nodes/bases/base.py +28 -9
  3. vellum/workflows/nodes/displayable/search_node/node.py +2 -1
  4. vellum/workflows/nodes/displayable/search_node/tests/test_node.py +14 -0
  5. vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +7 -1
  6. vellum/workflows/nodes/displayable/subworkflow_deployment_node/tests/test_node.py +1 -1
  7. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py +54 -0
  8. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +26 -24
  9. vellum/workflows/runner/runner.py +25 -37
  10. vellum/workflows/triggers/__init__.py +2 -1
  11. vellum/workflows/triggers/integration.py +62 -0
  12. vellum/workflows/triggers/tests/__init__.py +1 -0
  13. vellum/workflows/triggers/tests/test_integration.py +102 -0
  14. vellum/workflows/workflows/base.py +17 -3
  15. {vellum_ai-1.7.5.dist-info → vellum_ai-1.7.6.dist-info}/METADATA +1 -1
  16. {vellum_ai-1.7.5.dist-info → vellum_ai-1.7.6.dist-info}/RECORD +24 -21
  17. vellum_cli/push.py +1 -5
  18. vellum_cli/tests/test_push.py +86 -0
  19. vellum_ee/workflows/display/tests/workflow_serialization/test_manual_trigger_serialization.py +16 -19
  20. vellum_ee/workflows/display/workflows/base_workflow_display.py +23 -14
  21. vellum_ee/workflows/tests/test_server.py +40 -1
  22. {vellum_ai-1.7.5.dist-info → vellum_ai-1.7.6.dist-info}/LICENSE +0 -0
  23. {vellum_ai-1.7.5.dist-info → vellum_ai-1.7.6.dist-info}/WHEEL +0 -0
  24. {vellum_ai-1.7.5.dist-info → vellum_ai-1.7.6.dist-info}/entry_points.txt +0 -0
@@ -27,10 +27,10 @@ class BaseClientWrapper:
27
27
 
28
28
  def get_headers(self) -> typing.Dict[str, str]:
29
29
  headers: typing.Dict[str, str] = {
30
- "User-Agent": "vellum-ai/1.7.5",
30
+ "User-Agent": "vellum-ai/1.7.6",
31
31
  "X-Fern-Language": "Python",
32
32
  "X-Fern-SDK-Name": "vellum-ai",
33
- "X-Fern-SDK-Version": "1.7.5",
33
+ "X-Fern-SDK-Version": "1.7.6",
34
34
  **(self.get_custom_headers() or {}),
35
35
  }
36
36
  if self._api_version is not None:
@@ -1,10 +1,26 @@
1
1
  from abc import ABC, ABCMeta, abstractmethod
2
+ from collections.abc import Callable as CollectionsCallable
2
3
  from dataclasses import field
3
4
  from functools import cached_property, reduce
4
5
  import inspect
5
6
  from types import MappingProxyType
6
7
  from uuid import UUID, uuid4
7
- from typing import Any, Dict, Generic, Iterator, Optional, Set, Tuple, Type, TypeVar, Union, cast, get_args
8
+ from typing import (
9
+ Any,
10
+ Callable as TypingCallable,
11
+ Dict,
12
+ Generic,
13
+ Iterator,
14
+ Optional,
15
+ Set,
16
+ Tuple,
17
+ Type,
18
+ TypeVar,
19
+ Union,
20
+ cast,
21
+ get_args,
22
+ get_origin,
23
+ )
8
24
 
9
25
  from vellum.workflows.constants import undefined
10
26
  from vellum.workflows.descriptors.base import BaseDescriptor
@@ -43,15 +59,15 @@ def _is_nested_class(nested: Any, parent: Type) -> bool:
43
59
  ) or any(_is_nested_class(nested, base) for base in parent.__bases__)
44
60
 
45
61
 
46
- def _is_annotated(cls: Type, name: str) -> bool:
62
+ def _is_annotated(cls: Type, name: str) -> Any:
47
63
  if name in cls.__annotations__:
48
- return True
64
+ return cls.__annotations__[name]
49
65
 
50
66
  for base in cls.__bases__:
51
- if _is_annotated(base, name):
52
- return True
67
+ if annotation := _is_annotated(base, name):
68
+ return annotation
53
69
 
54
- return False
70
+ return None
55
71
 
56
72
 
57
73
  class BaseNodeMeta(ABCMeta):
@@ -151,8 +167,10 @@ class BaseNodeMeta(ABCMeta):
151
167
  try:
152
168
  attribute = super().__getattribute__(name)
153
169
  except AttributeError as e:
154
- if _is_annotated(cls, name):
155
- attribute = None
170
+ annotation = _is_annotated(cls, name)
171
+ origin_annotation = get_origin(annotation)
172
+ if origin_annotation is not CollectionsCallable and origin_annotation is not TypingCallable:
173
+ attribute = undefined
156
174
  else:
157
175
  raise e
158
176
 
@@ -482,7 +500,8 @@ class BaseNode(Generic[StateType], ABC, BaseExecutable, metaclass=BaseNodeMeta):
482
500
  setattr(base, leaf, input_value)
483
501
 
484
502
  for descriptor in self.__class__:
485
- if not descriptor.instance:
503
+ if descriptor.instance is undefined:
504
+ setattr(self, descriptor.name, undefined)
486
505
  continue
487
506
 
488
507
  if any(isinstance(t, type) and issubclass(t, BaseDescriptor) for t in descriptor.types):
@@ -1,6 +1,7 @@
1
1
  import json
2
2
  from typing import ClassVar
3
3
 
4
+ from vellum.workflows.constants import undefined
4
5
  from vellum.workflows.errors import WorkflowErrorCode
5
6
  from vellum.workflows.exceptions import NodeException
6
7
  from vellum.workflows.nodes.displayable.bases import BaseSearchNode as BaseSearchNode
@@ -37,7 +38,7 @@ class SearchNode(BaseSearchNode[StateType]):
37
38
  text: str
38
39
 
39
40
  def run(self) -> Outputs:
40
- if self.query is None or self.query == "":
41
+ if self.query is undefined or self.query is None or self.query == "":
41
42
  raise NodeException(
42
43
  message="Search query is required but was not provided",
43
44
  code=WorkflowErrorCode.INVALID_INPUTS,
@@ -234,3 +234,17 @@ def test_run_workflow__invalid_query_raises_validation_error(invalid_query):
234
234
  assert exc_info.value.code == WorkflowErrorCode.INVALID_INPUTS
235
235
  assert "query" in exc_info.value.message.lower()
236
236
  assert "required" in exc_info.value.message.lower() or "missing" in exc_info.value.message.lower()
237
+
238
+
239
+ def test_run_workflow__missing_query_attribute_raises_validation_error():
240
+ """Confirm that a SearchNode without a query attribute defined raises INVALID_INPUTS"""
241
+
242
+ class MySearchNode(SearchNode):
243
+ document_index = "document_index"
244
+
245
+ with pytest.raises(NodeException) as exc_info:
246
+ MySearchNode().run()
247
+
248
+ assert exc_info.value.code == WorkflowErrorCode.INVALID_INPUTS
249
+ assert "query" in exc_info.value.message.lower()
250
+ assert "required" in exc_info.value.message.lower()
@@ -15,7 +15,7 @@ from vellum import (
15
15
  from vellum.client.core import RequestOptions
16
16
  from vellum.client.core.api_error import ApiError
17
17
  from vellum.client.types.chat_message_request import ChatMessageRequest
18
- from vellum.workflows.constants import LATEST_RELEASE_TAG, OMIT
18
+ from vellum.workflows.constants import LATEST_RELEASE_TAG, OMIT, undefined
19
19
  from vellum.workflows.context import execution_context, get_execution_context, get_parent_context
20
20
  from vellum.workflows.errors import WorkflowErrorCode
21
21
  from vellum.workflows.errors.types import workflow_event_error_to_workflow_error
@@ -226,6 +226,12 @@ class SubworkflowDeploymentNode(BaseNode[StateType], Generic[StateType]):
226
226
  **request_options.get("additional_body_parameters", {}),
227
227
  }
228
228
 
229
+ if self.deployment is undefined:
230
+ raise NodeException(
231
+ code=WorkflowErrorCode.NODE_EXECUTION,
232
+ message="Expected subworkflow deployment attribute to be either a UUID or STR, got `undefined` instead",
233
+ )
234
+
229
235
  try:
230
236
  deployment_id = str(self.deployment) if isinstance(self.deployment, UUID) else None
231
237
  deployment_name = self.deployment if isinstance(self.deployment, str) else None
@@ -265,7 +265,7 @@ def test_run_workflow__no_deployment():
265
265
 
266
266
  # AND the error message should be correct
267
267
  assert exc_info.value.code == WorkflowErrorCode.NODE_EXECUTION
268
- assert "Expected subworkflow deployment attribute to be either a UUID or STR, got None instead" in str(
268
+ assert "Expected subworkflow deployment attribute to be either a UUID or STR, got `undefined` instead" in str(
269
269
  exc_info.value
270
270
  )
271
271
 
@@ -1,3 +1,4 @@
1
+ import pytest
1
2
  import json
2
3
  from uuid import uuid4
3
4
  from typing import Any, Iterator, List
@@ -14,6 +15,8 @@ from vellum.client.types.string_vellum_value import StringVellumValue
14
15
  from vellum.client.types.variable_prompt_block import VariablePromptBlock
15
16
  from vellum.prompts.constants import DEFAULT_PROMPT_PARAMETERS
16
17
  from vellum.workflows import BaseWorkflow
18
+ from vellum.workflows.errors.types import WorkflowErrorCode
19
+ from vellum.workflows.exceptions import NodeException
17
20
  from vellum.workflows.inputs.base import BaseInputs
18
21
  from vellum.workflows.nodes.bases import BaseNode
19
22
  from vellum.workflows.nodes.displayable.tool_calling_node.node import ToolCallingNode
@@ -372,3 +375,54 @@ def test_tool_calling_node_workflow_is_dynamic(vellum_adhoc_prompt_client):
372
375
  assert initiated_events[0].body.workflow_definition.is_dynamic is False # Main workflow
373
376
  assert initiated_events[1].body.workflow_definition.is_dynamic is True # Tool calling internal
374
377
  assert initiated_events[2].body.workflow_definition.is_dynamic is True # Inline workflow
378
+
379
+
380
+ def test_tool_node_preserves_node_exception():
381
+ """Test that tool nodes preserve NodeException error codes and raw_data."""
382
+
383
+ def failing_function() -> str:
384
+ raise NodeException(
385
+ message="Custom error",
386
+ code=WorkflowErrorCode.INVALID_INPUTS,
387
+ raw_data={"key": "value"},
388
+ )
389
+
390
+ tool_prompt_node = create_tool_prompt_node(
391
+ ml_model="test-model",
392
+ blocks=[],
393
+ functions=[failing_function],
394
+ prompt_inputs=None,
395
+ parameters=DEFAULT_PROMPT_PARAMETERS,
396
+ )
397
+
398
+ function_node_class = create_function_node(
399
+ function=failing_function,
400
+ tool_prompt_node=tool_prompt_node,
401
+ )
402
+
403
+ state = ToolCallingState(
404
+ meta=StateMeta(
405
+ node_outputs={
406
+ tool_prompt_node.Outputs.results: [
407
+ FunctionCallVellumValue(
408
+ value=FunctionCall(
409
+ arguments={},
410
+ id="call_123",
411
+ name="failing_function",
412
+ state="FULFILLED",
413
+ ),
414
+ )
415
+ ],
416
+ },
417
+ )
418
+ )
419
+
420
+ function_node = function_node_class(state=state)
421
+
422
+ with pytest.raises(NodeException) as exc_info:
423
+ list(function_node.run())
424
+
425
+ e = exc_info.value
426
+ assert e.code == WorkflowErrorCode.INVALID_INPUTS
427
+ assert e.raw_data == {"key": "value"}
428
+ assert "Custom error" in e.message
@@ -55,6 +55,28 @@ class FunctionCallNodeMixin:
55
55
 
56
56
  function_call_output: List[PromptOutput]
57
57
 
58
+ def _handle_tool_exception(self, e: Exception, tool_type: str, tool_name: str) -> None:
59
+ """
60
+ Re-raise exceptions with contextual information while preserving NodeException details.
61
+
62
+ Args:
63
+ e: The caught exception
64
+ tool_type: Type of tool (e.g., "function", "MCP tool", "Vellum Integration tool")
65
+ tool_name: Name of the tool that failed
66
+ """
67
+ if isinstance(e, NodeException):
68
+ # Preserve original error code and raw_data while adding context
69
+ raise NodeException(
70
+ message=f"Error executing {tool_type} '{tool_name}': {e.message}",
71
+ code=e.code,
72
+ raw_data=e.raw_data,
73
+ ) from e
74
+ else:
75
+ raise NodeException(
76
+ message=f"Error executing {tool_type} '{tool_name}': {str(e)}",
77
+ code=WorkflowErrorCode.NODE_EXECUTION,
78
+ ) from e
79
+
58
80
  def _extract_function_arguments(self) -> dict:
59
81
  """Extract arguments from function call output."""
60
82
  current_index = getattr(self, "state").current_prompt_output_index
@@ -201,11 +223,7 @@ class FunctionNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
201
223
  try:
202
224
  result = self.function_definition(**arguments)
203
225
  except Exception as e:
204
- function_name = self.function_definition.__name__
205
- raise NodeException(
206
- message=f"Error executing function '{function_name}': {str(e)}",
207
- code=WorkflowErrorCode.NODE_EXECUTION,
208
- ) from e
226
+ self._handle_tool_exception(e, "function", self.function_definition.__name__)
209
227
 
210
228
  # Add the result to the chat history
211
229
  self._add_function_result_to_chat_history(result, self.state)
@@ -232,10 +250,7 @@ class ComposioNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
232
250
  else:
233
251
  result = composio_service.execute_tool(tool_name=self.composio_tool.action, arguments=arguments)
234
252
  except Exception as e:
235
- raise NodeException(
236
- message=f"Error executing Composio tool '{self.composio_tool.action}': {str(e)}",
237
- code=WorkflowErrorCode.NODE_EXECUTION,
238
- ) from e
253
+ self._handle_tool_exception(e, "Composio tool", self.composio_tool.action)
239
254
 
240
255
  # Add result to chat history
241
256
  self._add_function_result_to_chat_history(result, self.state)
@@ -255,10 +270,7 @@ class MCPNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
255
270
  mcp_service = MCPService()
256
271
  result = mcp_service.execute_tool(tool_def=self.mcp_tool, arguments=arguments)
257
272
  except Exception as e:
258
- raise NodeException(
259
- message=f"Error executing MCP tool '{self.mcp_tool.name}': {str(e)}",
260
- code=WorkflowErrorCode.NODE_EXECUTION,
261
- ) from e
273
+ self._handle_tool_exception(e, "MCP tool", self.mcp_tool.name)
262
274
 
263
275
  # Add result to chat history
264
276
  self._add_function_result_to_chat_history(result, self.state)
@@ -283,18 +295,8 @@ class VellumIntegrationNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
283
295
  tool_name=self.vellum_integration_tool.name,
284
296
  arguments=arguments,
285
297
  )
286
- except NodeException as e:
287
- # Preserve original error code and raw_data while adding context
288
- raise NodeException(
289
- message=f"Error executing Vellum Integration tool '{self.vellum_integration_tool.name}': {e.message}",
290
- code=e.code,
291
- raw_data=e.raw_data,
292
- ) from e
293
298
  except Exception as e:
294
- raise NodeException(
295
- message=f"Error executing Vellum Integration tool '{self.vellum_integration_tool.name}': {str(e)}",
296
- code=WorkflowErrorCode.NODE_EXECUTION,
297
- ) from e
299
+ self._handle_tool_exception(e, "Vellum Integration tool", self.vellum_integration_tool.name)
298
300
 
299
301
  # Add result to chat history
300
302
  self._add_function_result_to_chat_history(result, self.state)
@@ -466,45 +466,11 @@ class WorkflowRunner(Generic[StateType]):
466
466
  parent=execution.parent_context,
467
467
  )
468
468
  except NodeException as e:
469
- logger.info(e)
470
- captured_stacktrace = traceback.format_exc()
471
-
472
- yield NodeExecutionRejectedEvent(
473
- trace_id=execution.trace_id,
474
- span_id=span_id,
475
- body=NodeExecutionRejectedBody(
476
- node_definition=node.__class__,
477
- error=e.error,
478
- stacktrace=captured_stacktrace,
479
- ),
480
- parent=execution.parent_context,
481
- )
469
+ yield self._handle_run_node_exception(e, "Node Exception", execution, span_id, node)
482
470
  except WorkflowInitializationException as e:
483
- logger.info(e)
484
- captured_stacktrace = traceback.format_exc()
485
- yield NodeExecutionRejectedEvent(
486
- trace_id=execution.trace_id,
487
- span_id=span_id,
488
- body=NodeExecutionRejectedBody(
489
- node_definition=node.__class__,
490
- error=e.error,
491
- stacktrace=captured_stacktrace,
492
- ),
493
- parent=execution.parent_context,
494
- )
471
+ yield self._handle_run_node_exception(e, "Workflow Initialization Exception", execution, span_id, node)
495
472
  except InvalidExpressionException as e:
496
- logger.info(e)
497
- captured_stacktrace = traceback.format_exc()
498
- yield NodeExecutionRejectedEvent(
499
- trace_id=execution.trace_id,
500
- span_id=span_id,
501
- body=NodeExecutionRejectedBody(
502
- node_definition=node.__class__,
503
- error=e.error,
504
- stacktrace=captured_stacktrace,
505
- ),
506
- parent=execution.parent_context,
507
- )
473
+ yield self._handle_run_node_exception(e, "Invalid Expression Exception", execution, span_id, node)
508
474
  except Exception as e:
509
475
  error_message = self._parse_error_message(e)
510
476
  if error_message is None:
@@ -529,6 +495,28 @@ class WorkflowRunner(Generic[StateType]):
529
495
 
530
496
  logger.debug(f"Finished running node: {node.__class__.__name__}")
531
497
 
498
+ def _handle_run_node_exception(
499
+ self,
500
+ exception: Union[NodeException, WorkflowInitializationException, InvalidExpressionException],
501
+ prefix: str,
502
+ execution: ExecutionContext,
503
+ span_id: UUID,
504
+ node: BaseNode[StateType],
505
+ ) -> NodeExecutionRejectedEvent:
506
+ logger.info(f"{prefix}: {exception}")
507
+ captured_stacktrace = traceback.format_exc()
508
+
509
+ return NodeExecutionRejectedEvent(
510
+ trace_id=execution.trace_id,
511
+ span_id=span_id,
512
+ body=NodeExecutionRejectedBody(
513
+ node_definition=node.__class__,
514
+ error=exception.error,
515
+ stacktrace=captured_stacktrace,
516
+ ),
517
+ parent=execution.parent_context,
518
+ )
519
+
532
520
  def _parse_error_message(self, exception: Exception) -> Optional[str]:
533
521
  try:
534
522
  _, _, tb = sys.exc_info()
@@ -1,4 +1,5 @@
1
1
  from vellum.workflows.triggers.base import BaseTrigger
2
+ from vellum.workflows.triggers.integration import IntegrationTrigger
2
3
  from vellum.workflows.triggers.manual import ManualTrigger
3
4
 
4
- __all__ = ["BaseTrigger", "ManualTrigger"]
5
+ __all__ = ["BaseTrigger", "IntegrationTrigger", "ManualTrigger"]
@@ -0,0 +1,62 @@
1
+ from abc import ABC
2
+ from typing import ClassVar, Optional
3
+
4
+ from vellum.workflows.outputs.base import BaseOutputs
5
+ from vellum.workflows.triggers.base import BaseTrigger
6
+
7
+
8
+ class IntegrationTrigger(BaseTrigger, ABC):
9
+ """
10
+ Base class for integration-based triggers (Slack, Email, etc.).
11
+
12
+ Integration triggers:
13
+ - Are initiated by external events (webhooks, API calls)
14
+ - Produce outputs that downstream nodes can reference
15
+ - Require configuration (auth, webhooks, etc.)
16
+
17
+ Examples:
18
+ # Define an integration trigger
19
+ class MyIntegrationTrigger(IntegrationTrigger):
20
+ class Outputs(IntegrationTrigger.Outputs):
21
+ data: str
22
+
23
+ @classmethod
24
+ def process_event(cls, event_data: dict):
25
+ return cls.Outputs(data=event_data.get("data", ""))
26
+
27
+ # Use in workflow
28
+ class MyWorkflow(BaseWorkflow):
29
+ graph = MyIntegrationTrigger >> ProcessNode
30
+
31
+ Note:
32
+ Unlike ManualTrigger, integration triggers provide structured outputs
33
+ that downstream nodes can reference directly via Outputs.
34
+ """
35
+
36
+ class Outputs(BaseOutputs):
37
+ """Base outputs for integration triggers."""
38
+
39
+ pass
40
+
41
+ # Configuration that can be set at runtime
42
+ config: ClassVar[Optional[dict]] = None
43
+
44
+ @classmethod
45
+ def process_event(cls, event_data: dict) -> "IntegrationTrigger.Outputs":
46
+ """
47
+ Process incoming webhook/event data and return trigger outputs.
48
+
49
+ This method should be implemented by subclasses to parse external
50
+ event payloads (e.g., Slack webhooks, email notifications) into
51
+ structured trigger outputs.
52
+
53
+ Args:
54
+ event_data: Raw event data from the external system
55
+
56
+ Returns:
57
+ Trigger outputs containing parsed event data
58
+
59
+ Raises:
60
+ NotImplementedError: If subclass doesn't implement this method
61
+ """
62
+ raise NotImplementedError(f"{cls.__name__} must implement process_event() method to handle external events")
@@ -0,0 +1 @@
1
+ # Tests for workflow triggers
@@ -0,0 +1,102 @@
1
+ """Tests for IntegrationTrigger base class."""
2
+
3
+ import pytest
4
+
5
+ from vellum.workflows.nodes.bases.base import BaseNode
6
+ from vellum.workflows.triggers.integration import IntegrationTrigger
7
+
8
+
9
+ def test_integration_trigger__is_abstract():
10
+ """IntegrationTrigger cannot be instantiated directly (ABC)."""
11
+ # WHEN we try to call process_event on IntegrationTrigger directly
12
+ # THEN it raises NotImplementedError
13
+ with pytest.raises(NotImplementedError, match="must implement process_event"):
14
+ IntegrationTrigger.process_event({})
15
+
16
+
17
+ def test_integration_trigger__outputs_class_exists():
18
+ """IntegrationTrigger has Outputs class."""
19
+ # GIVEN IntegrationTrigger
20
+ # THEN it has an Outputs class
21
+ assert hasattr(IntegrationTrigger, "Outputs")
22
+
23
+
24
+ def test_integration_trigger__can_be_subclassed():
25
+ """IntegrationTrigger can be subclassed to create concrete triggers."""
26
+
27
+ # GIVEN a concrete implementation of IntegrationTrigger
28
+ class TestTrigger(IntegrationTrigger):
29
+ class Outputs(IntegrationTrigger.Outputs):
30
+ data: str
31
+
32
+ @classmethod
33
+ def process_event(cls, event_data: dict):
34
+ return cls.Outputs(data=event_data.get("data", ""))
35
+
36
+ # WHEN we process an event
37
+ result = TestTrigger.process_event({"data": "test"})
38
+
39
+ # THEN it returns the expected outputs
40
+ assert result.data == "test"
41
+
42
+
43
+ def test_integration_trigger__graph_syntax():
44
+ """IntegrationTrigger can be used in graph syntax."""
45
+
46
+ # GIVEN a concrete trigger and a node
47
+ class TestTrigger(IntegrationTrigger):
48
+ class Outputs(IntegrationTrigger.Outputs):
49
+ value: str
50
+
51
+ @classmethod
52
+ def process_event(cls, event_data: dict):
53
+ return cls.Outputs(value=event_data.get("value", ""))
54
+
55
+ class TestNode(BaseNode):
56
+ pass
57
+
58
+ # WHEN we use trigger >> node syntax
59
+ graph = TestTrigger >> TestNode
60
+
61
+ # THEN a graph is created
62
+ assert graph is not None
63
+ assert len(list(graph.trigger_edges)) == 1
64
+ assert list(graph.trigger_edges)[0].trigger_class == TestTrigger
65
+ assert list(graph.trigger_edges)[0].to_node == TestNode
66
+
67
+
68
+ def test_integration_trigger__multiple_entrypoints():
69
+ """IntegrationTrigger works with multiple entry points."""
70
+
71
+ # GIVEN a trigger and multiple nodes
72
+ class TestTrigger(IntegrationTrigger):
73
+ class Outputs(IntegrationTrigger.Outputs):
74
+ msg: str
75
+
76
+ @classmethod
77
+ def process_event(cls, event_data: dict):
78
+ return cls.Outputs(msg=event_data.get("msg", ""))
79
+
80
+ class NodeA(BaseNode):
81
+ pass
82
+
83
+ class NodeB(BaseNode):
84
+ pass
85
+
86
+ # WHEN we use trigger >> {nodes} syntax
87
+ graph = TestTrigger >> {NodeA, NodeB}
88
+
89
+ # THEN both nodes are entrypoints
90
+ trigger_edges = list(graph.trigger_edges)
91
+ assert len(trigger_edges) == 2
92
+ target_nodes = {edge.to_node for edge in trigger_edges}
93
+ assert target_nodes == {NodeA, NodeB}
94
+
95
+
96
+ def test_integration_trigger__config_attribute():
97
+ """IntegrationTrigger has optional config attribute."""
98
+
99
+ # GIVEN IntegrationTrigger
100
+ # THEN it has a config class variable
101
+ assert hasattr(IntegrationTrigger, "config")
102
+ assert IntegrationTrigger.config is None
@@ -26,6 +26,8 @@ from typing import (
26
26
  overload,
27
27
  )
28
28
 
29
+ from pydantic import ValidationError
30
+
29
31
  from vellum.workflows.edges import Edge
30
32
  from vellum.workflows.emitters.base import BaseWorkflowEmitter
31
33
  from vellum.workflows.errors import WorkflowError, WorkflowErrorCode
@@ -684,6 +686,10 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
684
686
  workflow_path = f"{module_path}.workflow"
685
687
  try:
686
688
  module = importlib.import_module(workflow_path)
689
+ except ValidationError as e:
690
+ raise WorkflowInitializationException(
691
+ message=f"Pydantic Model Validation defined in Workflow Failed: {e}"
692
+ ) from e
687
693
  except TypeError as e:
688
694
  if "Unexpected graph type" in str(e) or "unhashable type: 'set'" in str(e):
689
695
  raise WorkflowInitializationException(
@@ -691,9 +697,17 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
691
697
  "Please contact Vellum support for assistance with Workflow configuration."
692
698
  ) from e
693
699
  else:
694
- raise
695
- except (SyntaxError, ImportError, ModuleNotFoundError) as e:
696
- raise WorkflowInitializationException(message=f"Failed to load workflow module: {e}") from e
700
+ raise WorkflowInitializationException(message=f"Type Error raised while loading Workflow: {e}") from e
701
+ except SyntaxError as e:
702
+ raise WorkflowInitializationException(message=f"Syntax Error raised while loading Workflow: {e}") from e
703
+ except ModuleNotFoundError as e:
704
+ raise WorkflowInitializationException(message=f"Workflow module not found: {e}") from e
705
+ except ImportError as e:
706
+ raise WorkflowInitializationException(message=f"Invalid import found while loading Workflow: {e}") from e
707
+ except NameError as e:
708
+ raise WorkflowInitializationException(message=f"Invalid variable reference: {e}") from e
709
+ except Exception as e:
710
+ raise WorkflowInitializationException(message=f"Unexpected failure while loading module: {e}") from e
697
711
  workflows: List[Type[BaseWorkflow]] = []
698
712
  for name in dir(module):
699
713
  if name.startswith("__"):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: vellum-ai
3
- Version: 1.7.5
3
+ Version: 1.7.6
4
4
  Summary:
5
5
  License: MIT
6
6
  Requires-Python: >=3.9,<4.0
@@ -9,7 +9,7 @@ vellum_cli/logger.py,sha256=dcM_OmgqXLo93vDYswO5ylyUQQcTfnA5GTd5tbIt3wM,1446
9
9
  vellum_cli/move.py,sha256=lCHQ-U4BspgS512GxFFvUrglitaHkWfuKn1Hpfcn7-Q,2053
10
10
  vellum_cli/ping.py,sha256=p_BCCRjgPhng6JktuECtkDQLbhopt6JpmrtGoLnLJT8,1161
11
11
  vellum_cli/pull.py,sha256=udYyPlJ6VKDdh78rApNJOZgxHl82fcV6iGnRPSdX1LY,14750
12
- vellum_cli/push.py,sha256=KpBGq7B-ffwa9QTHsTRSk73l-tfKc3gyiBSn9Pwlsak,11878
12
+ vellum_cli/push.py,sha256=epoQNdFdXUzLlfwE2ZTklfy3DW6bI4-Pgv1QRS8CZXk,11803
13
13
  vellum_cli/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
14
  vellum_cli/tests/conftest.py,sha256=wx3PlJjVB0HRf5dr2b_idOIw27WPPl0J0FNbhIJJaVk,1689
15
15
  vellum_cli/tests/test_config.py,sha256=uvKGDc8BoVyT9_H0Z-g8469zVxomn6Oi3Zj-vK7O_wU,2631
@@ -20,7 +20,7 @@ vellum_cli/tests/test_main.py,sha256=qDZG-aQauPwBwM6A2DIu1494n47v3pL28XakTbLGZ-k
20
20
  vellum_cli/tests/test_move.py,sha256=FIrL1xlH5oFKGX2MugcTKL8JilpopmUC7hP5OaqF5zw,5213
21
21
  vellum_cli/tests/test_ping.py,sha256=b3aQLd-N59_8w2rRiWqwpB1rlHaKEYVbAj1Y3hi7A-g,2605
22
22
  vellum_cli/tests/test_pull.py,sha256=e2XHzcHIx9k-FyuNAl7wMSNsSSebPGyP6U05JGcddFs,49447
23
- vellum_cli/tests/test_push.py,sha256=2MjkNKr_9Guv5Exjsm3L1BeVXmPkKUcCSiKnp90HgW4,41996
23
+ vellum_cli/tests/test_push.py,sha256=oQ3x28G6IxplmMWCcPEYY46nOYAEPaihcMVsN4quQ5Q,45000
24
24
  vellum_ee/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
25
25
  vellum_ee/assets/node-definitions.json,sha256=Mm3c1nfEa1QjWWzNvIJlhahDcY4SM3wQm8og_x3jyd8,30755
26
26
  vellum_ee/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -110,7 +110,7 @@ vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling
110
110
  vellum_ee/workflows/display/tests/workflow_serialization/test_basic_try_node_serialization.py,sha256=pLCyMScV88DTBXRH7jXaXOEA1GBq8NIipCUFwIAWnwI,2771
111
111
  vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_node_serialization.py,sha256=exT7U-axwtYgFylagScflSQLJEND51qIAx2UATju6JM,6023
112
112
  vellum_ee/workflows/display/tests/workflow_serialization/test_final_output_node_map_reference_serialization.py,sha256=vl3pxUJlrYRA8zzFJ-gRm7fe-5fviLNSIsUC7imnMqk,3502
113
- vellum_ee/workflows/display/tests/workflow_serialization/test_manual_trigger_serialization.py,sha256=MQiZZfMtCps-_Me-SqH3TnC7sh52ApcF0_6ctoYZ63g,3798
113
+ vellum_ee/workflows/display/tests/workflow_serialization/test_manual_trigger_serialization.py,sha256=sd0lbity6yVIJ3HbMZEcL1wJEYoihqVW2Bjx8YMmrAM,3657
114
114
  vellum_ee/workflows/display/tests/workflow_serialization/test_terminal_node_any_serialization.py,sha256=4WAmSEJZlDBLPhsD1f4GwY9ahB9F6qJKGnL6j7ZYlzQ,1740
115
115
  vellum_ee/workflows/display/tests/workflow_serialization/test_web_search_node_serialization.py,sha256=vbDFBrWUPeeW7cxjNA6SXrsHlYcbOAhlQ4C45Vdnr1c,3428
116
116
  vellum_ee/workflows/display/tests/workflow_serialization/test_workflow_input_parameterization_error.py,sha256=vAdmn3YTBDpo55znbydQxsgg9ASqHcvsUPwiBR_7wfo,1461
@@ -127,7 +127,7 @@ vellum_ee/workflows/display/utils/tests/test_events.py,sha256=42IEBnMbaQrH8gigw5
127
127
  vellum_ee/workflows/display/utils/vellum.py,sha256=Bt7kdLdXoBsHn5dVEY2uKcF542VL09jwu8J_30rl2vk,6413
128
128
  vellum_ee/workflows/display/vellum.py,sha256=J2mdJZ1sdLW535DDUkq_Vm8Z572vhuxHxVZF9deKSdk,391
129
129
  vellum_ee/workflows/display/workflows/__init__.py,sha256=JTB9ObEV3l4gGGdtfBHwVJtTTKC22uj-a-XjTVwXCyA,148
130
- vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=qek7zT4V6XoqmU3seVZwrNzGNVb0CKU4GmEK8VLrWbc,46651
130
+ vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=DAGqGCg2WTBTNeO1akDpeCAJ5sAtUu65dnXsagnO6Jk,46937
131
131
  vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py,sha256=gxz76AeCqgAZ9D2lZeTiZzxY9eMgn3qOSfVgiqYcOh8,2028
132
132
  vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=lg-c_3P3ldtqWq2VFsk_2Mkn3pVdXWuT59QpH7QwXVs,39764
133
133
  vellum_ee/workflows/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -154,14 +154,14 @@ vellum_ee/workflows/tests/local_workflow/workflow.py,sha256=A4qOzOPNwePYxWbcAgIP
154
154
  vellum_ee/workflows/tests/test_display_meta.py,sha256=PkXJVnMZs9GNooDkd59n4YTBAX3XGPQWeSSVbhehVFM,5112
155
155
  vellum_ee/workflows/tests/test_registry.py,sha256=B8xRIuEyLWfSqrYoPldNQXhKPfe50PllvtAZoI8-uPs,6066
156
156
  vellum_ee/workflows/tests/test_serialize_module.py,sha256=zleQTcGZa5_nzwu4zpFoqEHhk7pb64hGrhObR4anhPQ,4471
157
- vellum_ee/workflows/tests/test_server.py,sha256=DtQdVlRlfIvq0L9mSs0SncI0jHgDAq05HQCLj29aiZo,24728
157
+ vellum_ee/workflows/tests/test_server.py,sha256=RmLE2s9Cs7NejvWf7aPRNuAeEER-hGk-9Q9fYrn5wps,26013
158
158
  vellum_ee/workflows/tests/test_virtual_files.py,sha256=TJEcMR0v2S8CkloXNmCHA0QW0K6pYNGaIjraJz7sFvY,2762
159
159
  vellum/__init__.py,sha256=6dkyRHmIKuQPzL_z3QLVUrbkAF-HJKDhDSMCnf4ZsKw,49502
160
160
  vellum/client/README.md,sha256=flqu57ubZNTfpq60CdLtJC9gp4WEkyjb_n_eZ4OYf9w,6497
161
161
  vellum/client/__init__.py,sha256=rMnKRqL5-356SBc-rfm56MkO87PuAi2mtcfBszcJU1M,74316
162
162
  vellum/client/core/__init__.py,sha256=lTcqUPXcx4112yLDd70RAPeqq6tu3eFMe1pKOqkW9JQ,1562
163
163
  vellum/client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
164
- vellum/client/core/client_wrapper.py,sha256=1YhfnMUVmjKkdmg9jR46ZTIffeCPF5RezCBeInW45hc,2840
164
+ vellum/client/core/client_wrapper.py,sha256=Xg-fX0-gnXrGKMqx9QTWnqutz2unMYFhgA34qo_877U,2840
165
165
  vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
166
166
  vellum/client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
167
167
  vellum/client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -1885,7 +1885,7 @@ vellum/workflows/integrations/vellum_integration_service.py,sha256=qhFoLzHlMli1P
1885
1885
  vellum/workflows/logging.py,sha256=_a217XogktV4Ncz6xKFz7WfYmZAzkfVRVuC0rWob8ls,437
1886
1886
  vellum/workflows/nodes/__init__.py,sha256=zymtc3_iW2rFmMR-sayTLuN6ZsAw8VnJweWPsjQk2-Q,1197
1887
1887
  vellum/workflows/nodes/bases/__init__.py,sha256=cniHuz_RXdJ4TQgD8CBzoiKDiPxg62ErdVpCbWICX64,58
1888
- vellum/workflows/nodes/bases/base.py,sha256=jitrO95AvJF2iCrqDgbNXZVLCx4LuApFk9Xx24_h7Bk,21653
1888
+ vellum/workflows/nodes/bases/base.py,sha256=u7NmYWhQZm91KZV53PGD-gpZvcWAItOme95TjZkHwDI,22094
1889
1889
  vellum/workflows/nodes/bases/base_adornment_node.py,sha256=hrgzuTetM4ynPd9YGHoK8Vwwn4XITi3aZZ_OCnQrq4Y,3433
1890
1890
  vellum/workflows/nodes/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1891
1891
  vellum/workflows/nodes/bases/tests/test_base_adornment_node.py,sha256=fXZI9KqpS4XMBrBnIEkK3foHaBVvyHwYcQWWDKay7ic,1148
@@ -1966,13 +1966,13 @@ vellum/workflows/nodes/displayable/prompt_deployment_node/node.py,sha256=XeBn6d3
1966
1966
  vellum/workflows/nodes/displayable/prompt_deployment_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1967
1967
  vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py,sha256=c_nuuqrwiIjgj4qIbVypfDuOc-3TlgO6CbXFqQl2Nqw,19725
1968
1968
  vellum/workflows/nodes/displayable/search_node/__init__.py,sha256=hpBpvbrDYf43DElRZFLzieSn8weXiwNiiNOJurERQbs,62
1969
- vellum/workflows/nodes/displayable/search_node/node.py,sha256=1dGCB1kb7MvX3fUJ5zP__Bh02mdPdRRsx_vwGyQFBVc,1981
1969
+ vellum/workflows/nodes/displayable/search_node/node.py,sha256=hbPsZhyXfq9dx0mfBKOyivluDmV2iXmDTT7loEyu9nI,2057
1970
1970
  vellum/workflows/nodes/displayable/search_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1971
- vellum/workflows/nodes/displayable/search_node/tests/test_node.py,sha256=WVZR3BI_CvxBG9hulv0-tcAc_gW5ozs0nH4uVNRJa2U,8863
1971
+ vellum/workflows/nodes/displayable/search_node/tests/test_node.py,sha256=YXgIIAJHVQxrfyJ0gxeJC0fAJaic10_zbqvsS8hyZSc,9368
1972
1972
  vellum/workflows/nodes/displayable/subworkflow_deployment_node/__init__.py,sha256=9yYM6001YZeqI1VOk1QuEM_yrffk_EdsO7qaPzINKds,92
1973
- vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py,sha256=SZIdk7aBmKKFgPYsxZaHIjT6p5W9HBYT1lj5yE4oax8,14169
1973
+ vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py,sha256=AHlW7aPDza1FuSR_9xONOe3tzDFDg8EIJtO1NJ7ILhk,14445
1974
1974
  vellum/workflows/nodes/displayable/subworkflow_deployment_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1975
- vellum/workflows/nodes/displayable/subworkflow_deployment_node/tests/test_node.py,sha256=c98nMPogZ6iN_pTvVUMTB3J72Hj--H-XVgvvRXhdSQE,19085
1975
+ vellum/workflows/nodes/displayable/subworkflow_deployment_node/tests/test_node.py,sha256=PII44speqT4fJvj60y_3KDAnH1L6Ivtq9R4BykY-X_A,19092
1976
1976
  vellum/workflows/nodes/displayable/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1977
1977
  vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py,sha256=E6PQ9OKcDlsJbWoxVKWgiAPgz9p59g1ONMgNggfOeiI,4868
1978
1978
  vellum/workflows/nodes/displayable/tests/test_search_node_error_handling.py,sha256=8aw8hDFL0ZXThvAa7yxrJN026EYGD4-Q1si3Phu9-_0,6307
@@ -1983,9 +1983,9 @@ vellum/workflows/nodes/displayable/tool_calling_node/node.py,sha256=rvCGtnCM1bFk
1983
1983
  vellum/workflows/nodes/displayable/tool_calling_node/state.py,sha256=CcBVb_YtwfSSka4ze678k6-qwmzMSfjfVP8_Y95feSo,302
1984
1984
  vellum/workflows/nodes/displayable/tool_calling_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1985
1985
  vellum/workflows/nodes/displayable/tool_calling_node/tests/test_composio_service.py,sha256=in1fbEz5x1tx3uKv9YXdvOncsHucNL8Ro6Go7lBuuOQ,8962
1986
- vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=Idjtlly6GTotNa4isXJ23RxKzQA2oE10MOm793aipLA,13892
1986
+ vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=uXz9tbPK6ikGlF8Tfx-qiPeF-8qq_E5ZbmCmOMWkz24,15506
1987
1987
  vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py,sha256=EmKFA-ELdTzlK0xMqWnuSZPoGNLYCwk6b0amTqirZo0,11305
1988
- vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=VfrW8OZfw6NNxY9xP_jhST8FaRX2xpl-eGNVR3YGClI,24519
1988
+ vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=sReCkqd2pwUQ6pX9xZ_zQh-Wza2_2eV7HYRAHYaAaHo,24580
1989
1989
  vellum/workflows/nodes/displayable/web_search_node/__init__.py,sha256=8FOnEP-n-U68cvxTlJW9wphIAGHq5aqjzLM-DoSSXnU,61
1990
1990
  vellum/workflows/nodes/displayable/web_search_node/node.py,sha256=NQYux2bOtuBF5E4tn-fXi5y3btURPRrNqMSM9MAZYI4,5091
1991
1991
  vellum/workflows/nodes/displayable/web_search_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -2025,7 +2025,7 @@ vellum/workflows/resolvers/resolver.py,sha256=3uEYscB_2PHTazc0Y9SzOe_yiQZhVLfey1
2025
2025
  vellum/workflows/resolvers/tests/test_resolver.py,sha256=PnUGzsulo1It_LjjhHsRNiILvvl5G_IaK8ZX56zKC28,6204
2026
2026
  vellum/workflows/resolvers/types.py,sha256=Hndhlk69g6EKLh_LYg5ILepW5U_h_BYNllfzhS9k8p4,237
2027
2027
  vellum/workflows/runner/__init__.py,sha256=i1iG5sAhtpdsrlvwgH6B-m49JsINkiWyPWs8vyT-bqM,72
2028
- vellum/workflows/runner/runner.py,sha256=nq-DsIZWLSofsMxZnDZZgV_MUod5Baz6mrEs7cgYPQY,45795
2028
+ vellum/workflows/runner/runner.py,sha256=250GCJFyqTX9q5gYhfo6AkiIgMScVfEp4KYofRweqEk,45455
2029
2029
  vellum/workflows/sandbox.py,sha256=mezSZmilR_fwR8164n8CEfzlMeQ55IqfapHp4ftImvQ,3212
2030
2030
  vellum/workflows/state/__init__.py,sha256=yUUdR-_Vl7UiixNDYQZ-GEM_kJI9dnOia75TtuNEsnE,60
2031
2031
  vellum/workflows/state/base.py,sha256=A8s0PC8UvFjPpkHDY6u-yIeb2KHjoAmu-GW-GYrDl0E,24654
@@ -2039,9 +2039,12 @@ vellum/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3
2039
2039
  vellum/workflows/tests/test_dataset_row.py,sha256=S8aIiYU9TRzJ8GTl5qCjnJ-fuHdxatHJFGLlKTVHPr4,4174
2040
2040
  vellum/workflows/tests/test_sandbox.py,sha256=JKwaluI-lODQo7Ek9sjDstjL_WTdSqUlVik6ZVTfVOA,1826
2041
2041
  vellum/workflows/tests/test_undefined.py,sha256=zMCVliCXVNLrlC6hEGyOWDnQADJ2g83yc5FIM33zuo8,353
2042
- vellum/workflows/triggers/__init__.py,sha256=yMSJHI_UbD_CnHJn2VHiX6qcdkyZ0BU8BS6fFQBbgZs,158
2042
+ vellum/workflows/triggers/__init__.py,sha256=kistj6wgRgYe3fO82_4giZ_xFkElfnsDGRKAbzxrGnc,249
2043
2043
  vellum/workflows/triggers/base.py,sha256=HIZkKPdb9GOYjpGzk2DoSqkxsUyjBnzmsLyhVTWm1iE,4857
2044
+ vellum/workflows/triggers/integration.py,sha256=_RyNrNnIBIa08FMHmLh2QEZPb-u5j75Bye_FbR82nkg,2043
2044
2045
  vellum/workflows/triggers/manual.py,sha256=PgbZ92gcK25yz6REXm98zWic1QBfhxLKfGCeHpZEUx4,1266
2046
+ vellum/workflows/triggers/tests/__init__.py,sha256=R8lag_iCRyulijHMK4e3Gf6YVB5NplfvwZeTkaRj8gQ,30
2047
+ vellum/workflows/triggers/tests/test_integration.py,sha256=5N7ejz4AFmgQAZlHGV466rzWyb79iLeXU-pAfx9o4TA,3213
2045
2048
  vellum/workflows/types/__init__.py,sha256=fZ3Xxly7YSsu4kCIYD5aYpYucNM97zTyInb9CA24mf0,102
2046
2049
  vellum/workflows/types/code_execution_node_wrappers.py,sha256=fewX9bqF_4TZuK-gZYIn12s31-k03vHMGRpvFAPm11Y,3206
2047
2050
  vellum/workflows/types/core.py,sha256=R7snCd7ci4tiRuHi5ALGh_5DIIF0T9eze3sf6EnJN-c,1126
@@ -2067,13 +2070,13 @@ vellum/workflows/utils/vellum_variables.py,sha256=X3lZn-EoWengRWBWRhTNW7hqbj7LkV
2067
2070
  vellum/workflows/utils/zip.py,sha256=HVg_YZLmBOTXKaDV3Xhaf3V6sYnfqqZXQ8CpuafkbPY,1181
2068
2071
  vellum/workflows/vellum_client.py,sha256=3iDR7VV_NgLSm1iZQCKDvrmfEaX1bOJiU15QrxyHpv0,1237
2069
2072
  vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
2070
- vellum/workflows/workflows/base.py,sha256=Fb5Bw1ZdysIoex-P7gOqRKqHrpYgj3qYIdeFqv6_y1k,30061
2073
+ vellum/workflows/workflows/base.py,sha256=2XQMntFPXUO3p1js14TEKun4oNWRmZOzga-pUiv5vVk,30935
2071
2074
  vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnadGsrSZGa7t7LpJA,2008
2072
2075
  vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2073
2076
  vellum/workflows/workflows/tests/test_base_workflow.py,sha256=Boa-_m9ii2Qsa1RvVM-VYniF7zCpzGgEGy-OnPZkrHg,23941
2074
2077
  vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
2075
- vellum_ai-1.7.5.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
2076
- vellum_ai-1.7.5.dist-info/METADATA,sha256=ShJgnNyDKIuEsE07Cut9gBsA8vALEZ4XkdDUeuOn7Ms,5547
2077
- vellum_ai-1.7.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
2078
- vellum_ai-1.7.5.dist-info/entry_points.txt,sha256=xVavzAKN4iF_NbmhWOlOkHluka0YLkbN_pFQ9pW3gLI,117
2079
- vellum_ai-1.7.5.dist-info/RECORD,,
2078
+ vellum_ai-1.7.6.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
2079
+ vellum_ai-1.7.6.dist-info/METADATA,sha256=Bk9FFFzHL7vS59oSmxxgX_8fOxREGwYmlaqSFXP6zr4,5547
2080
+ vellum_ai-1.7.6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
2081
+ vellum_ai-1.7.6.dist-info/entry_points.txt,sha256=xVavzAKN4iF_NbmhWOlOkHluka0YLkbN_pFQ9pW3gLI,117
2082
+ vellum_ai-1.7.6.dist-info/RECORD,,
vellum_cli/push.py CHANGED
@@ -77,11 +77,7 @@ def push_command(
77
77
 
78
78
  logger.info(f"Loading workflow from {workflow_config.module}")
79
79
  resolved_workspace = workspace or workflow_config.workspace or DEFAULT_WORKSPACE_CONFIG.name
80
- workspace_config = (
81
- next((w for w in config.workspaces if w.name == resolved_workspace), DEFAULT_WORKSPACE_CONFIG)
82
- if workspace
83
- else DEFAULT_WORKSPACE_CONFIG
84
- )
80
+ workspace_config = next((w for w in config.workspaces if w.name == resolved_workspace), DEFAULT_WORKSPACE_CONFIG)
85
81
  api_key = os.getenv(workspace_config.api_key)
86
82
  if not api_key:
87
83
  raise ValueError(f"No API key value found in environment for workspace '{workspace_config.name}'.")
@@ -1134,3 +1134,89 @@ def test_push__deploy_stores_deployment_config_in_lock_file(mock_module, vellum_
1134
1134
  deployment_config_str = call_args["deployment_config"]
1135
1135
  deployment_config = json.loads(deployment_config_str)
1136
1136
  assert deployment_config["name"] == "test-push-deploy-stores-deployment-config-in-lock-file"
1137
+
1138
+
1139
+ def test_push__custom_workspace_in_lockfile__uses_custom_workspace_without_flag(mock_module, vellum_client_class):
1140
+ """
1141
+ Tests that push respects the custom workspace from the lockfile when no --workspace flag is provided.
1142
+ """
1143
+
1144
+ # GIVEN a workflow already configured in the lockfile with a custom workspace
1145
+ temp_dir = mock_module.temp_dir
1146
+ module = mock_module.module
1147
+ workflow_sandbox_id = mock_module.workflow_sandbox_id
1148
+ set_pyproject_toml = mock_module.set_pyproject_toml
1149
+
1150
+ # AND the lockfile has the workflow with a custom workspace
1151
+ with open(os.path.join(temp_dir, "vellum.lock.json"), "w") as f:
1152
+ json.dump(
1153
+ {
1154
+ "version": "1.0",
1155
+ "workflows": [
1156
+ {
1157
+ "module": module,
1158
+ "workflow_sandbox_id": workflow_sandbox_id,
1159
+ "workspace": "my_custom_workspace",
1160
+ "container_image_name": None,
1161
+ "container_image_tag": None,
1162
+ "deployments": [],
1163
+ "ignore": None,
1164
+ "target_directory": None,
1165
+ }
1166
+ ],
1167
+ "workspaces": [],
1168
+ },
1169
+ f,
1170
+ indent=2,
1171
+ )
1172
+
1173
+ # AND the custom workspace is defined in pyproject.toml
1174
+ set_pyproject_toml(
1175
+ {
1176
+ "workflows": [],
1177
+ "workspaces": [
1178
+ {
1179
+ "name": "my_custom_workspace",
1180
+ "api_key": "MY_CUSTOM_VELLUM_API_KEY",
1181
+ }
1182
+ ],
1183
+ }
1184
+ )
1185
+
1186
+ # AND the .env file has the custom api key
1187
+ with open(os.path.join(temp_dir, ".env"), "w") as f:
1188
+ f.write(
1189
+ """\
1190
+ VELLUM_API_KEY=abcdef123456
1191
+ MY_CUSTOM_VELLUM_API_KEY=custom-key-xyz
1192
+ """
1193
+ )
1194
+
1195
+ # AND a workflow exists in the module
1196
+ _ensure_workflow_py(temp_dir, module)
1197
+
1198
+ # AND the push API returns successfully
1199
+ vellum_client_class.return_value.workflows.push.return_value = WorkflowPushResponse(
1200
+ workflow_sandbox_id=workflow_sandbox_id,
1201
+ )
1202
+ vellum_client_class.return_value._client_wrapper._environment.default = "https://api.vellum.ai/v1"
1203
+
1204
+ # WHEN calling `vellum push` WITHOUT the --workspace flag
1205
+ runner = CliRunner()
1206
+ result = runner.invoke(cli_main, ["push", module])
1207
+
1208
+ # THEN it should succeed
1209
+ assert result.exit_code == 0, result.output
1210
+
1211
+ # AND the custom workspace API key should have been used
1212
+ vellum_client_class.assert_called_once_with(
1213
+ api_key="custom-key-xyz",
1214
+ environment=mock.ANY,
1215
+ api_version=None,
1216
+ )
1217
+
1218
+ with open(os.path.join(temp_dir, "vellum.lock.json")) as f:
1219
+ lock_file_content = json.load(f)
1220
+ assert len(lock_file_content["workflows"]) == 1
1221
+ assert lock_file_content["workflows"][0]["workspace"] == "my_custom_workspace"
1222
+ assert lock_file_content["workflows"][0]["workflow_sandbox_id"] == workflow_sandbox_id
@@ -3,8 +3,6 @@
3
3
  import pytest
4
4
  from typing import cast
5
5
 
6
- from deepdiff import DeepDiff
7
-
8
6
  from vellum.workflows import BaseWorkflow
9
7
  from vellum.workflows.inputs.base import BaseInputs
10
8
  from vellum.workflows.nodes.bases.base import BaseNode
@@ -42,27 +40,24 @@ def serialize(workflow_class) -> JsonObject:
42
40
 
43
41
 
44
42
  def test_manual_trigger_serialization():
45
- """Workflow with ManualTrigger serializes with trigger field."""
43
+ """Workflow with ManualTrigger serializes with triggers field."""
46
44
  result = serialize(create_workflow(ManualTrigger))
47
- workflow_raw_data = cast(JsonObject, result["workflow_raw_data"])
48
- trigger = cast(JsonObject, workflow_raw_data["trigger"])
45
+ triggers = cast(JsonArray, result["triggers"])
46
+
47
+ assert len(triggers) == 1
48
+ trigger = cast(JsonObject, triggers[0])
49
49
 
50
50
  assert trigger["type"] == "MANUAL"
51
- assert not DeepDiff(
52
- {
53
- "type": "MANUAL",
54
- "definition": {"name": "ManualTrigger", "module": ["vellum", "workflows", "triggers", "manual"]},
55
- },
56
- trigger,
57
- ignore_order=True,
58
- )
51
+ assert "id" in trigger
52
+ assert "attributes" in trigger
53
+ assert trigger["attributes"] == []
54
+ assert "definition" not in trigger
59
55
 
60
56
 
61
57
  def test_no_trigger_serialization():
62
- """Workflow without trigger has no trigger field."""
58
+ """Workflow without trigger has no triggers field."""
63
59
  result = serialize(create_workflow())
64
- workflow_raw_data = cast(JsonObject, result["workflow_raw_data"])
65
- assert "trigger" not in workflow_raw_data
60
+ assert "triggers" not in result
66
61
 
67
62
 
68
63
  def test_manual_trigger_multiple_entrypoints():
@@ -84,10 +79,12 @@ def test_manual_trigger_multiple_entrypoints():
84
79
  output_b = NodeB.Outputs.output
85
80
 
86
81
  result = serialize(MultiWorkflow)
82
+ triggers = cast(JsonArray, result["triggers"])
87
83
  workflow_data = cast(JsonObject, result["workflow_raw_data"])
88
- trigger = cast(JsonObject, workflow_data["trigger"])
89
84
  nodes = cast(JsonArray, workflow_data["nodes"])
90
85
 
86
+ assert len(triggers) == 1
87
+ trigger = cast(JsonObject, triggers[0])
91
88
  assert trigger["type"] == "MANUAL"
92
89
  assert len([n for n in nodes if cast(JsonObject, n)["type"] == "GENERIC"]) >= 2
93
90
 
@@ -98,8 +95,8 @@ def test_serialized_workflow_structure():
98
95
  workflow_raw_data = cast(JsonObject, result["workflow_raw_data"])
99
96
  definition = cast(JsonObject, workflow_raw_data["definition"])
100
97
 
101
- assert result.keys() == {"workflow_raw_data", "input_variables", "state_variables", "output_variables"}
102
- assert workflow_raw_data.keys() == {"nodes", "edges", "display_data", "definition", "output_values", "trigger"}
98
+ assert result.keys() == {"workflow_raw_data", "input_variables", "state_variables", "output_variables", "triggers"}
99
+ assert workflow_raw_data.keys() == {"nodes", "edges", "display_data", "definition", "output_values"}
103
100
  assert definition["name"] == "TestWorkflow"
104
101
 
105
102
 
@@ -410,7 +410,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
410
410
  self.display_context.add_error(e)
411
411
 
412
412
  # Serialize workflow-level trigger if present
413
- trigger_data: Optional[JsonObject] = self._serialize_workflow_trigger()
413
+ triggers: Optional[JsonArray] = self._serialize_workflow_trigger()
414
414
 
415
415
  workflow_raw_data: JsonObject = {
416
416
  "nodes": cast(JsonArray, nodes_dict_list),
@@ -423,22 +423,25 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
423
423
  "output_values": output_values,
424
424
  }
425
425
 
426
- if trigger_data is not None:
427
- workflow_raw_data["trigger"] = trigger_data
428
-
429
- return {
426
+ result: JsonObject = {
430
427
  "workflow_raw_data": workflow_raw_data,
431
428
  "input_variables": input_variables,
432
429
  "state_variables": state_variables,
433
430
  "output_variables": output_variables,
434
431
  }
435
432
 
436
- def _serialize_workflow_trigger(self) -> Optional[JsonObject]:
433
+ if triggers is not None:
434
+ result["triggers"] = triggers
435
+
436
+ return result
437
+
438
+ def _serialize_workflow_trigger(self) -> Optional[JsonArray]:
437
439
  """
438
440
  Serialize workflow-level trigger information.
439
441
 
440
442
  Returns:
441
- JsonObject with trigger data if a trigger is present, None otherwise
443
+ JsonArray with trigger data if a trigger is present, None otherwise.
444
+ Each trigger in the array has: id (UUID), type (str), attributes (list)
442
445
  """
443
446
  # Get all trigger edges from the workflow's subgraphs
444
447
  trigger_edges = []
@@ -469,13 +472,19 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
469
472
  f"Please add it to the trigger type mapping in get_trigger_type_mapping()."
470
473
  )
471
474
 
472
- return {
473
- "type": trigger_type.value,
474
- "definition": {
475
- "name": trigger_class.__name__,
476
- "module": cast(JsonArray, trigger_class.__module__.split(".")),
477
- },
478
- }
475
+ # Return as a list with a single trigger object matching Django schema
476
+ trigger_id = uuid4_from_hash(f"{trigger_class.__module__} | {trigger_class.__qualname__}")
477
+
478
+ return cast(
479
+ JsonArray,
480
+ [
481
+ {
482
+ "id": str(trigger_id),
483
+ "type": trigger_type.value,
484
+ "attributes": [],
485
+ }
486
+ ],
487
+ )
479
488
 
480
489
  def _serialize_edge_display_data(self, edge_display: EdgeDisplay) -> Optional[JsonObject]:
481
490
  """Serialize edge display data, returning None if no display data is present."""
@@ -536,10 +536,49 @@ class BrokenNode(BaseNode) # Missing colon
536
536
 
537
537
  # AND the error message should be user-friendly
538
538
  error_message = str(exc_info.value)
539
- assert "Failed to load workflow module:" in error_message
539
+ assert "Syntax Error raised while loading Workflow:" in error_message
540
540
  assert "invalid syntax" in error_message or "expected ':'" in error_message
541
541
 
542
542
 
543
+ def test_load_from_module__name_error_in_node_file():
544
+ """
545
+ Tests that a NameError in a node file raises WorkflowInitializationException with user-facing message.
546
+ """
547
+ # GIVEN a workflow module with a node file containing a NameError (undefined class reference)
548
+ files = {
549
+ "__init__.py": "",
550
+ "workflow.py": """\
551
+ from vellum.workflows import BaseWorkflow
552
+ from .nodes.broken_node import BrokenNode
553
+
554
+ class Workflow(BaseWorkflow):
555
+ graph = BrokenNode
556
+ """,
557
+ "nodes/__init__.py": "",
558
+ "nodes/broken_node.py": """\
559
+ from vellum.workflows.nodes import BaseNode
560
+
561
+ class BrokenNode(BaseNode):
562
+ some_attribute = UndefinedClass()
563
+ """,
564
+ }
565
+
566
+ namespace = str(uuid4())
567
+
568
+ # AND the virtual file loader is registered
569
+ sys.meta_path.append(VirtualFileFinder(files, namespace))
570
+
571
+ # WHEN we attempt to load the workflow
572
+ # THEN it should raise WorkflowInitializationException
573
+ with pytest.raises(WorkflowInitializationException) as exc_info:
574
+ BaseWorkflow.load_from_module(namespace)
575
+
576
+ # AND the error message should be user-friendly
577
+ error_message = str(exc_info.value)
578
+ assert "Invalid variable reference:" in error_message
579
+ assert "UndefinedClass" in error_message or "not defined" in error_message
580
+
581
+
543
582
  def test_serialize_module__tool_calling_node_with_single_tool():
544
583
  """Test that serialize_module works with a tool calling node that has a single tool."""
545
584