vellum-ai 1.5.0__py3-none-any.whl → 1.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. vellum/client/core/client_wrapper.py +2 -2
  2. vellum/workflows/descriptors/utils.py +3 -0
  3. vellum/workflows/emitters/vellum_emitter.py +4 -1
  4. vellum/workflows/integrations/__init__.py +5 -0
  5. vellum/workflows/integrations/tests/__init__.py +0 -0
  6. vellum/workflows/integrations/tests/test_vellum_integration_service.py +225 -0
  7. vellum/workflows/integrations/vellum_integration_service.py +96 -0
  8. vellum/workflows/nodes/bases/base.py +24 -3
  9. vellum/workflows/nodes/core/inline_subworkflow_node/node.py +5 -0
  10. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +2 -5
  11. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +38 -4
  12. vellum/workflows/utils/functions.py +23 -17
  13. vellum/workflows/workflows/base.py +6 -2
  14. vellum/workflows/workflows/tests/test_base_workflow.py +45 -0
  15. {vellum_ai-1.5.0.dist-info → vellum_ai-1.5.1.dist-info}/METADATA +1 -1
  16. {vellum_ai-1.5.0.dist-info → vellum_ai-1.5.1.dist-info}/RECORD +36 -33
  17. vellum_ee/assets/node-definitions.json +376 -26
  18. vellum_ee/scripts/generate_node_definitions.py +1 -1
  19. vellum_ee/workflows/display/nodes/base_node_display.py +6 -3
  20. vellum_ee/workflows/display/nodes/vellum/api_node.py +4 -7
  21. vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py +19 -5
  22. vellum_ee/workflows/display/nodes/vellum/retry_node.py +2 -3
  23. vellum_ee/workflows/display/nodes/vellum/search_node.py +3 -6
  24. vellum_ee/workflows/display/nodes/vellum/templating_node.py +1 -1
  25. vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py +2 -3
  26. vellum_ee/workflows/display/nodes/vellum/try_node.py +3 -4
  27. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_attributes_serialization.py +5 -11
  28. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_outputs_serialization.py +1 -1
  29. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_ports_serialization.py +1 -1
  30. vellum_ee/workflows/display/types.py +3 -3
  31. vellum_ee/workflows/display/utils/expressions.py +10 -3
  32. vellum_ee/workflows/display/utils/vellum.py +9 -2
  33. vellum_ee/workflows/display/workflows/base_workflow_display.py +2 -2
  34. {vellum_ai-1.5.0.dist-info → vellum_ai-1.5.1.dist-info}/LICENSE +0 -0
  35. {vellum_ai-1.5.0.dist-info → vellum_ai-1.5.1.dist-info}/WHEEL +0 -0
  36. {vellum_ai-1.5.0.dist-info → vellum_ai-1.5.1.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py
@@ -27,10 +27,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "vellum-ai/1.5.0",
+            "User-Agent": "vellum-ai/1.5.1",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "1.5.0",
+            "X-Fern-SDK-Version": "1.5.1",
             **(self.get_custom_headers() or {}),
         }
         if self._api_version is not None:
vellum/workflows/descriptors/utils.py
@@ -33,6 +33,9 @@ def resolve_value(
     from the `_T` generic.
     """
 
+    if memo is not None and path in memo:
+        return cast(_T, memo[path])
+
     if inspect.isclass(value):
         return cast(_T, value)
 
vellum/workflows/emitters/vellum_emitter.py
@@ -126,7 +126,10 @@ class VellumEmitter(BaseWorkflowEmitter):
             return
 
         client = self._context.vellum_client
-        request_options = RequestOptions(timeout_in_seconds=self._timeout, max_retries=self._max_retries)
+        if self._timeout is not None:
+            request_options = RequestOptions(timeout_in_seconds=int(self._timeout), max_retries=self._max_retries)
+        else:
+            request_options = RequestOptions(max_retries=self._max_retries)
 
         client.events.create(
             # The API accepts a ClientWorkflowEvent but our SDK emits an SDKWorkflowEvent. These shapes are
vellum/workflows/integrations/__init__.py
@@ -0,0 +1,5 @@
+from .composio_service import ComposioService
+from .mcp_service import MCPService
+from .vellum_integration_service import VellumIntegrationService
+
+__all__ = ["ComposioService", "MCPService", "VellumIntegrationService"]
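With these re-exports in place, all three services can be imported from the package root, for example:

    from vellum.workflows.integrations import ComposioService, MCPService, VellumIntegrationService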
vellum/workflows/integrations/tests/__init__.py
File without changes
vellum/workflows/integrations/tests/test_vellum_integration_service.py
@@ -0,0 +1,225 @@
+import pytest
+from unittest import mock
+
+from vellum.workflows.exceptions import NodeException
+from vellum.workflows.integrations.vellum_integration_service import VellumIntegrationService
+
+
+def test_vellum_integration_service_get_tool_definition_success():
+    """Test that tool definitions are successfully retrieved from Vellum API"""
+    with mock.patch(
+        "vellum.workflows.integrations.vellum_integration_service.create_vellum_client"
+    ) as mock_create_client:
+        # GIVEN a mock Vellum client configured to return a tool definition
+        mock_client = mock.MagicMock()
+        mock_create_client.return_value = mock_client
+
+        mock_response = mock.MagicMock()
+        mock_response.name = "GITHUB_CREATE_AN_ISSUE"
+        mock_response.description = "Create a new issue in a GitHub repository"
+        mock_response.parameters = {
+            "type": "object",
+            "properties": {
+                "repo": {"type": "string", "description": "Repository name"},
+                "title": {"type": "string", "description": "Issue title"},
+                "body": {"type": "string", "description": "Issue body"},
+            },
+            "required": ["repo", "title"],
+        }
+        mock_response.provider = "COMPOSIO"
+
+        mock_client.integrations.retrieve_integration_tool_definition.return_value = mock_response
+
+        # WHEN we request a tool definition
+        service = VellumIntegrationService()
+        result = service.get_tool_definition(
+            integration="GITHUB",
+            provider="COMPOSIO",
+            tool_name="GITHUB_CREATE_AN_ISSUE",
+        )
+
+        # THEN the tool definition should be returned with all expected fields
+        assert result["name"] == "GITHUB_CREATE_AN_ISSUE"
+        assert result["description"] == "Create a new issue in a GitHub repository"
+        assert result["provider"] == "COMPOSIO"
+        assert "properties" in result["parameters"]
+        assert "repo" in result["parameters"]["properties"]
+
+        # AND the API should have been called with the correct parameters
+        mock_client.integrations.retrieve_integration_tool_definition.assert_called_once_with(
+            integration="GITHUB",
+            provider="COMPOSIO",
+            tool_name="GITHUB_CREATE_AN_ISSUE",
+        )
+
+
+def test_vellum_integration_service_get_tool_definition_api_error():
+    """Test that API errors are properly handled when retrieving tool definitions"""
+    with mock.patch(
+        "vellum.workflows.integrations.vellum_integration_service.create_vellum_client"
+    ) as mock_create_client:
+        # GIVEN a mock client that raises an exception when retrieving tool definitions
+        mock_client = mock.MagicMock()
+        mock_create_client.return_value = mock_client
+
+        mock_client.integrations.retrieve_integration_tool_definition.side_effect = Exception("Tool not found")
+
+        # WHEN we attempt to get a tool definition for an invalid tool
+        service = VellumIntegrationService()
+
+        # THEN it should raise a NodeException with appropriate error message
+        with pytest.raises(NodeException) as exc_info:
+            service.get_tool_definition(
+                integration="GITHUB",
+                provider="COMPOSIO",
+                tool_name="INVALID_TOOL",
+            )
+
+        assert "Failed to retrieve tool definition" in str(exc_info.value)
+        assert "Tool not found" in str(exc_info.value)
+
+
+def test_vellum_integration_service_execute_tool_success():
+    """Test that tools are successfully executed via Vellum API"""
+    with mock.patch(
+        "vellum.workflows.integrations.vellum_integration_service.create_vellum_client"
+    ) as mock_create_client:
+        # GIVEN a mock client configured to return successful execution results
+        mock_client = mock.MagicMock()
+        mock_create_client.return_value = mock_client
+
+        mock_response = mock.MagicMock()
+        mock_response.data = {
+            "success": True,
+            "issue_id": 123,
+            "issue_url": "https://github.com/user/repo/issues/123",
+        }
+
+        mock_client.integrations.execute_integration_tool.return_value = mock_response
+
+        # WHEN we execute a tool with valid arguments
+        service = VellumIntegrationService()
+        result = service.execute_tool(
+            integration="GITHUB",
+            provider="COMPOSIO",
+            tool_name="GITHUB_CREATE_AN_ISSUE",
+            arguments={
+                "repo": "user/repo",
+                "title": "Test Issue",
+                "body": "Test body",
+            },
+        )
+
+        # THEN the execution result should contain expected data
+        assert result["success"] is True
+        assert result["issue_id"] == 123
+        assert result["issue_url"] == "https://github.com/user/repo/issues/123"
+
+        # AND the API should have been called with correct parameters
+        mock_client.integrations.execute_integration_tool.assert_called_once_with(
+            integration="GITHUB",
+            provider="COMPOSIO",
+            tool_name="GITHUB_CREATE_AN_ISSUE",
+            arguments={
+                "repo": "user/repo",
+                "title": "Test Issue",
+                "body": "Test body",
+            },
+        )
+
+
+def test_vellum_integration_service_execute_tool_api_error():
+    """Test that execution errors are properly handled"""
+    with mock.patch(
+        "vellum.workflows.integrations.vellum_integration_service.create_vellum_client"
+    ) as mock_create_client:
+        # GIVEN a mock client that raises an exception during tool execution
+        mock_client = mock.MagicMock()
+        mock_create_client.return_value = mock_client
+
+        mock_client.integrations.execute_integration_tool.side_effect = Exception("Authentication failed")
+
+        # WHEN we attempt to execute a tool that encounters an error
+        service = VellumIntegrationService()
+
+        # THEN it should raise a NodeException with appropriate error message
+        with pytest.raises(NodeException) as exc_info:
+            service.execute_tool(
+                integration="GITHUB",
+                provider="COMPOSIO",
+                tool_name="GITHUB_CREATE_AN_ISSUE",
+                arguments={"repo": "user/repo"},
+            )
+
+        assert "Failed to execute tool" in str(exc_info.value)
+        assert "Authentication failed" in str(exc_info.value)
+
+
+def test_vellum_integration_service_execute_tool_empty_response():
+    """Test that empty response data is handled gracefully"""
+    with mock.patch(
+        "vellum.workflows.integrations.vellum_integration_service.create_vellum_client"
+    ) as mock_create_client:
+        # GIVEN a mock client that returns an empty response
+        mock_client = mock.MagicMock()
+        mock_create_client.return_value = mock_client
+
+        mock_response = mock.MagicMock()
+        mock_response.data = {}
+
+        mock_client.integrations.execute_integration_tool.return_value = mock_response
+
+        # WHEN we execute a tool that returns empty data
+        service = VellumIntegrationService()
+        result = service.execute_tool(
+            integration="SLACK",
+            provider="COMPOSIO",
+            tool_name="SLACK_SEND_MESSAGE",
+            arguments={
+                "channel": "#general",
+                "message": "Hello, world!",
+            },
+        )
+
+        # THEN an empty dictionary should be returned without errors
+        assert result == {}
+
+
+def test_vellum_integration_service_multiple_tool_executions():
+    """Test that the service handles multiple sequential tool executions"""
+    with mock.patch(
+        "vellum.workflows.integrations.vellum_integration_service.create_vellum_client"
+    ) as mock_create_client:
+        # GIVEN a mock client configured to return different responses for each call
+        mock_client = mock.MagicMock()
+        mock_create_client.return_value = mock_client
+
+        responses = [
+            mock.MagicMock(data={"result": "first"}),
+            mock.MagicMock(data={"result": "second"}),
+        ]
+        mock_client.integrations.execute_integration_tool.side_effect = responses
+
+        # WHEN we execute multiple tools in sequence
+        service = VellumIntegrationService()
+
+        result1 = service.execute_tool(
+            integration="GITHUB",
+            provider="COMPOSIO",
+            tool_name="TOOL_1",
+            arguments={"arg": "val1"},
+        )
+
+        result2 = service.execute_tool(
+            integration="SLACK",
+            provider="COMPOSIO",
+            tool_name="TOOL_2",
+            arguments={"arg": "val2"},
+        )
+
+        # THEN each tool execution should return its respective result
+        assert result1["result"] == "first"
+        assert result2["result"] == "second"
+
+        # AND the API should have been called twice
+        assert mock_client.integrations.execute_integration_tool.call_count == 2
vellum/workflows/integrations/vellum_integration_service.py
@@ -0,0 +1,96 @@
+from typing import Any, Dict
+
+from vellum.workflows.errors.types import WorkflowErrorCode
+from vellum.workflows.exceptions import NodeException
+from vellum.workflows.vellum_client import create_vellum_client
+
+
+class VellumIntegrationService:
+    """Vellum Integration Service for retrieving tool definitions and executing tools.
+
+    This service uses the native Vellum client SDK to interact with Vellum's integration
+    endpoints, providing functionality similar to ComposioService but using Vellum's
+    own integration infrastructure.
+    """
+
+    def __init__(self) -> None:
+        """Initialize the VellumIntegrationService with a Vellum client."""
+        self._client = create_vellum_client()
+
+    def get_tool_definition(
+        self,
+        integration: str,
+        provider: str,
+        tool_name: str,
+    ) -> Dict[str, Any]:
+        """Retrieve a tool definition from Vellum integrations.
+
+        Args:
+            integration: The integration name (e.g., "GITHUB", "SLACK")
+            provider: The integration provider name (e.g., "COMPOSIO")
+            tool_name: The tool's unique name as specified by the provider
+
+        Returns:
+            Dict containing the tool definition with name, description, and parameters
+
+        Raises:
+            NodeException: If the tool definition cannot be retrieved
+        """
+        try:
+            response = self._client.integrations.retrieve_integration_tool_definition(
+                integration=integration,
+                provider=provider,
+                tool_name=tool_name,
+            )
+
+            # Convert the response to a dict format matching what's expected
+            return {
+                "name": response.name,
+                "description": response.description,
+                "parameters": response.parameters,
+                "provider": response.provider,
+            }
+        except Exception as e:
+            error_message = f"Failed to retrieve tool definition for {tool_name}: {str(e)}"
+            raise NodeException(
+                message=error_message,
+                code=WorkflowErrorCode.INVALID_OUTPUTS,
+            ) from e
+
+    def execute_tool(
+        self,
+        integration: str,
+        provider: str,
+        tool_name: str,
+        arguments: Dict[str, Any],
+    ) -> Dict[str, Any]:
+        """Execute a tool through Vellum integrations.
+
+        Args:
+            integration: The integration name (e.g., "GITHUB", "SLACK")
+            provider: The integration provider name (e.g., "COMPOSIO")
+            tool_name: The tool's unique name as specified by the provider
+            arguments: Arguments to pass to the tool
+
+        Returns:
+            Dict containing the execution result data
+
+        Raises:
+            NodeException: If the tool execution fails
+        """
+        try:
+            response = self._client.integrations.execute_integration_tool(
+                integration=integration,
+                provider=provider,
+                tool_name=tool_name,
+                arguments=arguments,
+            )
+
+            # Return the data from the response
+            return response.data
+        except Exception as e:
+            error_message = f"Failed to execute tool {tool_name}: {str(e)}"
+            raise NodeException(
+                message=error_message,
+                code=WorkflowErrorCode.INVALID_OUTPUTS,
+            ) from e
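A minimal usage sketch of this service (illustrative integration and tool names; assumes Vellum API credentials are already configured for create_vellum_client, e.g. via environment variables):

    from vellum.workflows.integrations import VellumIntegrationService

    service = VellumIntegrationService()

    # Fetch the provider's JSON-schema-style definition for a tool.
    definition = service.get_tool_definition(
        integration="GITHUB",
        provider="COMPOSIO",
        tool_name="GITHUB_CREATE_AN_ISSUE",
    )

    # Execute the tool; the returned dict is the provider's response data.
    result = service.execute_tool(
        integration="GITHUB",
        provider="COMPOSIO",
        tool_name="GITHUB_CREATE_AN_ISSUE",
        arguments={"repo": "user/repo", "title": "Example issue"},
    )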
vellum/workflows/nodes/bases/base.py
@@ -439,6 +439,7 @@ class BaseNode(Generic[StateType], ABC, BaseExecutable, metaclass=BaseNodeMeta):
         *,
         state: Optional[StateType] = None,
         context: Optional[WorkflowContext] = None,
+        inputs: Optional[Dict[str, Any]] = None,
     ):
         if state:
             self.state = state
@@ -459,7 +460,27 @@ class BaseNode(Generic[StateType], ABC, BaseExecutable, metaclass=BaseNodeMeta):
             self.state = state_type()
 
         self._context = context or WorkflowContext()
-        inputs: Dict[str, Any] = {}
+        inputs_memo: Dict[str, Any] = inputs.copy() if inputs else {}
+        if inputs:
+            for input_key, input_value in inputs.items():
+                path_parts = input_key.split(".")
+                dir_path = path_parts[:-1]
+                leaf = path_parts[-1]
+                base: Any = self.__class__
+
+                for attr_name in dir_path:
+                    if hasattr(base, attr_name):
+                        base = getattr(base, attr_name)
+                    elif isinstance(base, dict) and attr_name in base:
+                        base = base[attr_name]
+                    else:
+                        break
+
+                if isinstance(base, dict):
+                    base[leaf] = input_value
+                else:
+                    setattr(base, leaf, input_value)
+
         for descriptor in self.__class__:
             if not descriptor.instance:
                 continue
@@ -468,12 +489,12 @@ class BaseNode(Generic[StateType], ABC, BaseExecutable, metaclass=BaseNodeMeta):
                 # We don't want to resolve attributes that are _meant_ to be descriptors
                 continue
 
-            resolved_value = resolve_value(descriptor.instance, self.state, path=descriptor.name, memo=inputs)
+            resolved_value = resolve_value(descriptor.instance, self.state, path=descriptor.name, memo=inputs_memo)
             setattr(self, descriptor.name, resolved_value)
 
         # We only want to store the attributes that were actually set as inputs, not every attribute that exists.
         all_inputs = {}
-        for key, value in inputs.items():
+        for key, value in inputs_memo.items():
             path_parts = key.split(".")
             node_attribute_descriptor = getattr(self.__class__, path_parts[0])
             inputs_key = reduce(lambda acc, part: acc[part], path_parts[1:], node_attribute_descriptor)
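In effect, each key in inputs is a dotted path resolved against the node class: intermediate segments walk class attributes (or dict keys), and the leaf is overwritten before attribute resolution runs. A rough standalone sketch of that traversal (illustrative class and helper, not the SDK's):

    from typing import Any, Dict

    class ExampleNode:
        settings: Dict[str, Any] = {"retries": 1}
        label: str = "default"

    def apply_dotted_overrides(target: type, overrides: Dict[str, Any]) -> None:
        # Walk all but the last path segment, then assign into a dict or set an attribute at the leaf.
        for dotted_key, value in overrides.items():
            *parents, leaf = dotted_key.split(".")
            base: Any = target
            for name in parents:
                if hasattr(base, name):
                    base = getattr(base, name)
                elif isinstance(base, dict) and name in base:
                    base = base[name]
                else:
                    break
            if isinstance(base, dict):
                base[leaf] = value
            else:
                setattr(base, leaf, value)

    apply_dotted_overrides(ExampleNode, {"label": "overridden", "settings.retries": 3})
    assert ExampleNode.label == "overridden" and ExampleNode.settings["retries"] == 3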
vellum/workflows/nodes/core/inline_subworkflow_node/node.py
@@ -130,6 +130,11 @@ class InlineSubworkflowNode(
         )
 
     def _compile_subworkflow_inputs(self) -> InputsType:
+        if self.subworkflow is None:
+            raise NodeException(
+                message="InlineSubworkflowNode requires a subworkflow to be defined",
+                code=WorkflowErrorCode.INVALID_INPUTS,
+            )
         inputs_class = self.subworkflow.get_inputs_class()
         try:
             if self.subworkflow_inputs is undefined:
vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py
@@ -57,6 +57,7 @@ from vellum.workflows.utils.functions import (
     compile_function_definition,
     compile_inline_workflow_function_definition,
     compile_mcp_tool_definition,
+    compile_vellum_integration_tool_definition,
     compile_workflow_deployment_function_definition,
     get_mcp_tool_name,
 )
@@ -151,11 +152,7 @@ class BaseInlinePromptNode(BasePromptNode[StateType], Generic[StateType]):
             elif isinstance(function, ComposioToolDefinition):
                 normalized_functions.append(compile_composio_tool_definition(function))
             elif isinstance(function, VellumIntegrationToolDefinition):
-                # TODO: Implement compile_vellum_integration_tool_definition
-                raise NotImplementedError(
-                    "VellumIntegrationToolDefinition support coming soon. "
-                    "This will be implemented when compile_vellum_integration_tool_definition is created."
-                )
+                normalized_functions.append(compile_vellum_integration_tool_definition(function))
             elif isinstance(function, MCPServer):
                 tool_definitions = compile_mcp_tool_definition(function)
                 for tool_def in tool_definitions:
vellum/workflows/nodes/displayable/tool_calling_node/utils.py
@@ -21,6 +21,7 @@ from vellum.workflows.expressions.concat import ConcatExpression
 from vellum.workflows.inputs import BaseInputs
 from vellum.workflows.integrations.composio_service import ComposioService
 from vellum.workflows.integrations.mcp_service import MCPService
+from vellum.workflows.integrations.vellum_integration_service import VellumIntegrationService
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.core.inline_subworkflow_node.node import InlineSubworkflowNode
 from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode
@@ -263,6 +264,34 @@ class MCPNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
         yield from []
 
 
+class VellumIntegrationNode(BaseNode[ToolCallingState], FunctionCallNodeMixin):
+    """Node that executes a Vellum Integration tool with function call output."""
+
+    vellum_integration_tool: VellumIntegrationToolDefinition
+
+    def run(self) -> Iterator[BaseOutput]:
+        arguments = self._extract_function_arguments()
+
+        try:
+            vellum_service = VellumIntegrationService()
+            result = vellum_service.execute_tool(
+                integration=self.vellum_integration_tool.integration,
+                provider=self.vellum_integration_tool.provider.value,
+                tool_name=self.vellum_integration_tool.name,
+                arguments=arguments,
+            )
+        except Exception as e:
+            raise NodeException(
+                message=f"Error executing Vellum Integration tool '{self.vellum_integration_tool.name}': {str(e)}",
+                code=WorkflowErrorCode.NODE_EXECUTION,
+            )
+
+        # Add result to chat history
+        self._add_function_result_to_chat_history(result, self.state)
+
+        yield from []
+
+
 class ElseNode(BaseNode[ToolCallingState]):
     """Node that executes when no function conditions match."""
 
@@ -465,11 +494,16 @@ def create_function_node(
         )
         return node
     elif isinstance(function, VellumIntegrationToolDefinition):
-        # TODO: Implement VellumIntegrationNode
-        raise NotImplementedError(
-            "VellumIntegrationToolDefinition support coming soon. "
-            "This will be implemented when the VellumIntegrationService is created."
+        node = type(
+            f"VellumIntegrationNode_{function.name}",
+            (VellumIntegrationNode,),
+            {
+                "vellum_integration_tool": function,
+                "function_call_output": tool_prompt_node.Outputs.results,
+                "__module__": __name__,
+            },
         )
+        return node
    elif is_workflow_class(function):
        function.is_dynamic = True
        node = type(
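The branch above follows the same runtime-subclassing pattern used for the other tool types: type(name, bases, namespace) builds a per-tool node class whose class attributes carry the tool definition and the prompt node's output reference. A generic sketch of that pattern (illustrative names, not the SDK's):

    class ToolNodeBase:
        tool_name: str = ""

        def run(self) -> str:
            return f"executing {self.tool_name}"

    def make_tool_node(tool_name: str) -> type:
        # Build a subclass at runtime; class attributes act as per-tool configuration.
        return type(
            f"ToolNode_{tool_name}",
            (ToolNodeBase,),
            {"tool_name": tool_name, "__module__": __name__},
        )

    IssueNode = make_tool_node("GITHUB_CREATE_AN_ISSUE")
    assert IssueNode().run() == "executing GITHUB_CREATE_AN_ISSUE"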
vellum/workflows/utils/functions.py
@@ -10,6 +10,7 @@ from vellum import Vellum
 from vellum.client.types.function_definition import FunctionDefinition
 from vellum.workflows.integrations.composio_service import ComposioService
 from vellum.workflows.integrations.mcp_service import MCPService
+from vellum.workflows.integrations.vellum_integration_service import VellumIntegrationService
 from vellum.workflows.types.definition import (
     ComposioToolDefinition,
     DeploymentDefinition,
@@ -95,22 +96,21 @@ def compile_annotation(annotation: Optional[Any], defs: dict[str, Any]) -> dict:
             defs[annotation.__name__] = {"type": "object", "properties": properties, "required": required}
         return {"$ref": f"#/$defs/{annotation.__name__}"}
 
-    if issubclass(annotation, BaseModel):
+    if inspect.isclass(annotation) and issubclass(annotation, BaseModel):
         if annotation.__name__ not in defs:
             properties = {}
             required = []
-            for field_name, field in annotation.model_fields.items():
-                # Mypy is incorrect here, the `annotation` attribute is defined on `FieldInfo`
-                field_annotation = field.annotation  # type: ignore[attr-defined]
-                properties[field_name] = compile_annotation(field_annotation, defs)
+            for field_name, field_info in annotation.model_fields.items():
+                # field_info is a FieldInfo object which has an annotation attribute
+                properties[field_name] = compile_annotation(field_info.annotation, defs)
 
-                if hasattr(field, "description") and field.description is not None:
-                    properties[field_name]["description"] = field.description  # type: ignore[attr-defined]
+                if field_info.description is not None:
+                    properties[field_name]["description"] = field_info.description
 
-                if field.default is PydanticUndefined:
+                if field_info.default is PydanticUndefined:
                     required.append(field_name)
                 else:
-                    properties[field_name]["default"] = _compile_default_value(field.default)
+                    properties[field_name]["default"] = _compile_default_value(field_info.default)
             defs[annotation.__name__] = {"type": "object", "properties": properties, "required": required}
 
         return {"$ref": f"#/$defs/{annotation.__name__}"}
@@ -326,20 +326,26 @@ def compile_composio_tool_definition(tool_def: ComposioToolDefinition) -> Functi
 def compile_vellum_integration_tool_definition(tool_def: VellumIntegrationToolDefinition) -> FunctionDefinition:
     """Compile a VellumIntegrationToolDefinition into a FunctionDefinition.
 
-    TODO: Implement when VellumIntegrationService is created.
-
     Args:
         tool_def: The VellumIntegrationToolDefinition to compile
 
     Returns:
         FunctionDefinition with tool parameters and description
     """
-    # TODO: Implement when VellumIntegrationService is available
-    # This will eventually use VellumIntegrationService to fetch tool details
-    raise NotImplementedError(
-        "VellumIntegrationToolDefinition compilation coming soon. "
-        "This will be implemented when the VellumIntegrationService is created."
-    )
+    try:
+        service = VellumIntegrationService()
+        tool_details = service.get_tool_definition(
+            integration=tool_def.integration, provider=tool_def.provider.value, tool_name=tool_def.name
+        )
+
+        return FunctionDefinition(
+            name=tool_def.name,
+            description=tool_details.get("description", tool_def.description),
+            parameters=tool_details.get("parameters", {}),
+        )
+    except Exception:
+        # Fallback for service failures
+        return FunctionDefinition(name=tool_def.name, description=tool_def.description, parameters={})
 
 
 def use_tool_inputs(**inputs):
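The compiled result (and the fallback used when the service call fails) is a plain FunctionDefinition whose parameters follow the JSON-schema shape returned by get_tool_definition. Roughly, a successful compile of the GitHub tool used in the tests above would look like this (illustrative values only):

    from vellum.client.types.function_definition import FunctionDefinition

    compiled = FunctionDefinition(
        name="GITHUB_CREATE_AN_ISSUE",
        description="Create a new issue in a GitHub repository",
        parameters={
            "type": "object",
            "properties": {"repo": {"type": "string"}, "title": {"type": "string"}},
            "required": ["repo", "title"],
        },
    )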
vellum/workflows/workflows/base.py
@@ -565,10 +565,14 @@ class BaseWorkflow(Generic[InputsType, StateType], BaseExecutable, metaclass=_Ba
             # https://app.shortcut.com/vellum/story/4327
             pass
 
-    def run_node(self, node: Type[BaseNode]) -> Generator[NodeEvent, None, None]:
+    def run_node(
+        self, node: Type[BaseNode], *, inputs: Optional[Dict[str, Any]] = None
+    ) -> Generator[NodeEvent, None, None]:
         runner = WorkflowRunner(self)
         span_id = uuid4()
-        return runner.run_node(node=node(state=self.get_default_state(), context=self._context), span_id=span_id)
+        node_instance = node(state=self.get_default_state(), context=self._context, inputs=inputs)
+
+        return runner.run_node(node=node_instance, span_id=span_id)
 
     @classmethod
     @lru_cache
vellum/workflows/workflows/tests/test_base_workflow.py
@@ -738,3 +738,48 @@ def test_base_workflow__run_node_emits_correct_events():
     assert isinstance(events[1], NodeExecutionFulfilledEvent)
     assert events[0].span_id == events[1].span_id
     assert events[1].body.outputs.result == "test_output"
+
+
+def test_base_workflow__run_node_with_inputs():
+    """Test that run_node method accepts and applies inputs parameter with dot notation."""
+
+    class TestInputs(BaseInputs):
+        pass
+
+    class TestState(BaseState):
+        pass
+
+    class TestCodeNode(BaseNode[TestState]):
+        code_inputs: dict = {"input1": "default_value", "input2": "default_value2"}
+        test_attr: str = "default"
+        another_attr: str = "not_overridden"
+
+        class Outputs(BaseNode.Outputs):
+            result: str
+
+        def run(self) -> "TestCodeNode.Outputs":
+            return self.Outputs(
+                result=f"{self.test_attr}_{self.code_inputs['input1']}_{self.code_inputs['input2']}_{self.another_attr}"
+            )
+
+    class TestWorkflow(BaseWorkflow[TestInputs, TestState]):
+        graph = TestCodeNode
+
+        class Outputs(BaseWorkflow.Outputs):
+            result: str
+
+    workflow = TestWorkflow()
+
+    inputs_data = {"test_attr": "overridden", "code_inputs.input1": "overridden_value"}
+
+    # WHEN we run the node with inputs
+    events = list(workflow.run_node(node=TestCodeNode, inputs=inputs_data))
+
+    # THEN the node should execute with the overridden attributes
+    assert len(events) == 2
+    assert isinstance(events[0], NodeExecutionInitiatedEvent)
+    assert isinstance(events[1], NodeExecutionFulfilledEvent)
+
+    # AND the execution result should use the overridden and non-overridden attributes
+    fulfilled_event = events[1]
+    assert fulfilled_event.body.outputs.result == "overridden_overridden_value_default_value2_not_overridden"
{vellum_ai-1.5.0.dist-info → vellum_ai-1.5.1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 1.5.0
+Version: 1.5.1
 Summary: 
 License: MIT
 Requires-Python: >=3.9,<4.0