vellum-ai 0.14.88__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
- vellum/__init__.py +1 -1
- vellum/client/README.md +33 -10
- vellum/client/__init__.py +141 -40
- vellum/client/core/client_wrapper.py +18 -5
- vellum/client/reference.md +241 -318
- vellum/client/resources/ad_hoc/client.py +76 -24
- vellum/client/resources/container_images/client.py +14 -6
- vellum/client/resources/deployments/client.py +28 -4
- vellum/client/resources/document_indexes/client.py +30 -38
- vellum/client/resources/documents/client.py +8 -30
- vellum/client/resources/folder_entities/client.py +4 -0
- vellum/client/resources/metric_definitions/client.py +16 -4
- vellum/client/resources/ml_models/client.py +2 -0
- vellum/client/resources/organizations/client.py +2 -0
- vellum/client/resources/prompts/client.py +26 -6
- vellum/client/resources/release_reviews/client.py +2 -0
- vellum/client/resources/sandboxes/client.py +10 -10
- vellum/client/resources/test_suite_runs/client.py +6 -0
- vellum/client/resources/test_suites/client.py +96 -58
- vellum/client/resources/workflow_deployments/client.py +16 -0
- vellum/client/resources/workflow_sandboxes/client.py +4 -0
- vellum/client/resources/workflows/client.py +0 -30
- vellum/client/resources/workspace_secrets/client.py +4 -0
- vellum/client/resources/workspaces/client.py +2 -0
- vellum/evaluations/resources.py +5 -5
- vellum/workflows/events/node.py +1 -1
- vellum/workflows/events/types.py +1 -1
- vellum/workflows/events/workflow.py +1 -1
- vellum/workflows/nodes/bases/tests/test_base_node.py +1 -1
- vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +1 -1
- vellum/workflows/nodes/displayable/bases/search_node.py +1 -1
- vellum/workflows/nodes/displayable/code_execution_node/node.py +1 -1
- vellum/workflows/nodes/displayable/guardrail_node/node.py +1 -1
- vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +1 -1
- vellum/workflows/nodes/displayable/tool_calling_node/node.py +1 -5
- vellum/workflows/nodes/displayable/tool_calling_node/state.py +9 -0
- vellum/workflows/nodes/displayable/tool_calling_node/utils.py +133 -155
- vellum/workflows/nodes/experimental/__init__.py +2 -2
- vellum/workflows/state/base.py +1 -1
- vellum/workflows/state/delta.py +1 -1
- {vellum_ai-0.14.88.dist-info → vellum_ai-1.0.0.dist-info}/METADATA +1 -1
- {vellum_ai-0.14.88.dist-info → vellum_ai-1.0.0.dist-info}/RECORD +59 -61
- vellum_cli/config.py +1 -1
- vellum_cli/push.py +1 -1
- vellum_ee/workflows/display/base.py +0 -54
- vellum_ee/workflows/display/nodes/vellum/conditional_node.py +16 -2
- vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py +3 -3
- vellum_ee/workflows/display/tests/test_base_workflow_display.py +2 -4
- vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/conftest.py +2 -2
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_node_serialization.py +11 -11
- vellum_ee/workflows/display/vellum.py +2 -128
- vellum_ee/workflows/display/workflows/__init__.py +0 -1
- vellum_ee/workflows/display/workflows/base_workflow_display.py +1 -1
- vellum_ee/workflows/tests/local_workflow/display/nodes/final_output.py +1 -1
- vellum_ee/workflows/tests/local_workflow/display/nodes/templating_node.py +1 -1
- vellum_ee/workflows/tests/local_workflow/display/workflow.py +11 -14
- vellum/workflows/nodes/experimental/tool_calling_node/__init__.py +0 -3
- vellum/workflows/nodes/experimental/tool_calling_node/node.py +0 -3
- vellum_ee/workflows/display/workflows/vellum_workflow_display.py +0 -9
- {vellum_ai-0.14.88.dist-info → vellum_ai-1.0.0.dist-info}/LICENSE +0 -0
- {vellum_ai-0.14.88.dist-info → vellum_ai-1.0.0.dist-info}/WHEEL +0 -0
- {vellum_ai-0.14.88.dist-info → vellum_ai-1.0.0.dist-info}/entry_points.txt +0 -0
vellum/workflows/nodes/displayable/tool_calling_node/utils.py
CHANGED
@@ -1,40 +1,35 @@
 import json
-from typing import Any, Iterator, List, Optional, Type, cast
+from typing import Any, Callable, Iterator, List, Optional, Type, cast

 from pydash import snake_case

 from vellum import ChatMessage, PromptBlock
 from vellum.client.types.function_call_chat_message_content import FunctionCallChatMessageContent
 from vellum.client.types.function_call_chat_message_content_value import FunctionCallChatMessageContentValue
+from vellum.client.types.prompt_output import PromptOutput
 from vellum.client.types.string_chat_message_content import StringChatMessageContent
 from vellum.client.types.variable_prompt_block import VariablePromptBlock
-from vellum.workflows.context import execution_context, get_parent_context
 from vellum.workflows.errors.types import WorkflowErrorCode
-from vellum.workflows.events.workflow import is_workflow_event
 from vellum.workflows.exceptions import NodeException
+from vellum.workflows.inputs import BaseInputs
 from vellum.workflows.nodes.bases import BaseNode
+from vellum.workflows.nodes.core.inline_subworkflow_node.node import InlineSubworkflowNode
 from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode
 from vellum.workflows.nodes.displayable.subworkflow_deployment_node.node import SubworkflowDeploymentNode
-from vellum.workflows.
+from vellum.workflows.nodes.displayable.tool_calling_node.state import ToolCallingState
+from vellum.workflows.outputs.base import BaseOutput
 from vellum.workflows.ports.port import Port
 from vellum.workflows.references.lazy import LazyReference
-from vellum.workflows.state
+from vellum.workflows.state import BaseState
 from vellum.workflows.state.encoder import DefaultStateEncoder
 from vellum.workflows.types.core import EntityInputsInterface, MergeBehavior, Tool
 from vellum.workflows.types.definition import DeploymentDefinition
 from vellum.workflows.types.generics import is_workflow_class
-from vellum.workflows.workflows.event_filters import all_workflow_event_filter

 CHAT_HISTORY_VARIABLE = "chat_history"


-class
-    """Node that executes a specific function."""
-
-    pass
-
-
-class ToolRouterNode(InlinePromptNode):
+class ToolRouterNode(InlinePromptNode[ToolCallingState]):
     max_prompt_iterations: Optional[int] = 5

     class Trigger(InlinePromptNode.Trigger):
@@ -76,6 +71,118 @@ class ToolRouterNode(InlinePromptNode):
             yield output


+class DynamicSubworkflowDeploymentNode(SubworkflowDeploymentNode[ToolCallingState]):
+    """Node that executes a deployment definition with function call output."""
+
+    function_call_output: List[PromptOutput]
+
+    def run(self) -> Iterator[BaseOutput]:
+        if self.function_call_output and len(self.function_call_output) > 0:
+            function_call = self.function_call_output[0]
+            if function_call.type == "FUNCTION_CALL" and function_call.value is not None:
+                arguments = function_call.value.arguments
+            else:
+                arguments = {}
+        else:
+            arguments = {}
+
+        # Mypy doesn't like instance assignments of class attributes. It's safe in our case tho bc it's what
+        # we do in the `__init__` method. Long term, instead of the function_call_output attribute above, we
+        # want to do:
+        # ```python
+        # subworkflow_inputs = tool_router_node.Outputs.results[0]['value']['arguments'].if_(
+        #     tool_router_node.Outputs.results[0]['type'].equals('FUNCTION_CALL'),
+        #     {},
+        # )
+        # ```
+        self.subworkflow_inputs = arguments  # type:ignore[misc]
+
+        # Call the parent run method to execute the subworkflow
+        outputs = {}
+        for output in super().run():
+            if output.is_fulfilled:
+                outputs[output.name] = output.value
+            yield output
+
+        # Add the result to the chat history
+        self.state.chat_history.append(
+            ChatMessage(
+                role="FUNCTION",
+                content=StringChatMessageContent(value=json.dumps(outputs, cls=DefaultStateEncoder)),
+            )
+        )
+
+
+class DynamicInlineSubworkflowNode(InlineSubworkflowNode[ToolCallingState, BaseInputs, BaseState]):
+    """Node that executes an inline subworkflow with function call output."""
+
+    function_call_output: List[PromptOutput]
+
+    def run(self) -> Iterator[BaseOutput]:
+        if self.function_call_output and len(self.function_call_output) > 0:
+            function_call = self.function_call_output[0]
+            if function_call.type == "FUNCTION_CALL" and function_call.value is not None:
+                arguments = function_call.value.arguments
+            else:
+                arguments = {}
+        else:
+            arguments = {}
+
+        self.subworkflow_inputs = arguments  # type: ignore[misc]
+
+        # Call the parent run method to execute the subworkflow with proper streaming
+        outputs = {}
+
+        for output in super().run():
+            if output.is_fulfilled:
+                outputs[output.name] = output.value
+            yield output
+
+        # Add the result to the chat history
+        self.state.chat_history.append(
+            ChatMessage(
+                role="FUNCTION",
+                content=StringChatMessageContent(value=json.dumps(outputs, cls=DefaultStateEncoder)),
+            )
+        )
+
+
+class FunctionNode(BaseNode[ToolCallingState]):
+    """Node that executes a regular Python function with function call output."""
+
+    function_call_output: List[PromptOutput]
+    function_definition: Callable[..., Any]
+
+    def run(self) -> Iterator[BaseOutput]:
+        if self.function_call_output and len(self.function_call_output) > 0:
+            function_call = self.function_call_output[0]
+            if function_call.type == "FUNCTION_CALL" and function_call.value is not None:
+                arguments = function_call.value.arguments
+            else:
+                arguments = {}
+        else:
+            arguments = {}
+
+        try:
+            result = self.function_definition(**arguments)
+        except Exception as e:
+            function_name = self.function_definition.__name__
+            raise NodeException(
+                message=f"Error executing function '{function_name}': {str(e)}",
+                code=WorkflowErrorCode.NODE_EXECUTION,
+            )
+
+        # Add the result to the chat history
+        self.state.chat_history.append(
+            ChatMessage(
+                role="FUNCTION",
+                content=StringChatMessageContent(value=json.dumps(result, cls=DefaultStateEncoder)),
+            )
+        )
+
+        yield from []
+
+
 def create_tool_router_node(
     ml_model: str,
     blocks: List[PromptBlock],
@@ -147,7 +254,7 @@ def create_tool_router_node(
 def create_function_node(
     function: Tool,
     tool_router_node: Type[ToolRouterNode],
-) -> Type[
+) -> Type[BaseNode]:
     """
     Create a FunctionNode class for a given function.

@@ -162,48 +269,13 @@ def create_function_node(
         deployment = function.deployment_id or function.deployment_name
         release_tag = function.release_tag

-        def execute_workflow_deployment_function(self) -> BaseNode.Outputs:
-            function_call_output = self.state.meta.node_outputs.get(tool_router_node.Outputs.results)
-            if function_call_output and len(function_call_output) > 0:
-                function_call = function_call_output[0]
-                arguments = function_call.value.arguments
-            else:
-                arguments = {}
-
-            subworkflow_node = type(
-                f"DynamicSubworkflowNode_{deployment}",
-                (SubworkflowDeploymentNode,),
-                {
-                    "deployment": deployment,
-                    "release_tag": release_tag,
-                    "subworkflow_inputs": arguments,
-                    "__module__": __name__,
-                },
-            )
-
-            node_instance = subworkflow_node(
-                context=WorkflowContext.create_from(self._context),
-                state=self.state,
-            )
-
-            outputs = {}
-            for output in node_instance.run():
-                outputs[output.name] = output.value
-
-            self.state.chat_history.append(
-                ChatMessage(
-                    role="FUNCTION",
-                    content=StringChatMessageContent(value=json.dumps(outputs, cls=DefaultStateEncoder)),
-                )
-            )
-
-            return self.Outputs()
-
         node = type(
-            f"
-            (
+            f"DynamicSubworkflowDeploymentNode_{deployment}",
+            (DynamicSubworkflowDeploymentNode,),
             {
-                "
+                "deployment": deployment,
+                "release_tag": release_tag,
+                "function_call_output": tool_router_node.Outputs.results,
                 "__module__": __name__,
             },
         )
@@ -211,117 +283,23 @@ def create_function_node(
         return node

     elif is_workflow_class(function):
-        # Create a class-level wrapper that calls the original function
-        def execute_inline_workflow_function(self) -> BaseNode.Outputs:
-            function_call_output = self.state.meta.node_outputs.get(tool_router_node.Outputs.results)
-            if function_call_output and len(function_call_output) > 0:
-                function_call = function_call_output[0]
-                arguments = function_call.value.arguments
-            else:
-                arguments = {}
-
-            # Call the function based on its type
-            inputs_instance = function.get_inputs_class()(**arguments)
-
-            with execution_context(parent_context=get_parent_context()):
-                workflow = function(
-                    parent_state=self.state,
-                    context=WorkflowContext.create_from(self._context),
-                )
-                subworkflow_stream = workflow.stream(
-                    inputs=inputs_instance,
-                    event_filter=all_workflow_event_filter,
-                    node_output_mocks=self._context._get_all_node_output_mocks(),
-                )
-
-            outputs: Optional[BaseOutputs] = None
-            exception: Optional[NodeException] = None
-
-            for event in subworkflow_stream:
-                self._context._emit_subworkflow_event(event)
-                if exception:
-                    continue
-
-                if not is_workflow_event(event):
-                    continue
-                if event.workflow_definition != function:
-                    continue
-
-                if event.name == "workflow.execution.fulfilled":
-                    outputs = event.outputs
-                elif event.name == "workflow.execution.rejected":
-                    exception = NodeException.of(event.error)
-                elif event.name == "workflow.execution.paused":
-                    exception = NodeException(
-                        code=WorkflowErrorCode.INVALID_OUTPUTS,
-                        message="Subworkflow unexpectedly paused",
-                    )
-
-            if exception:
-                raise exception
-
-            if outputs is None:
-                raise NodeException(
-                    message="Expected to receive outputs from inline subworkflow",
-                    code=WorkflowErrorCode.INVALID_OUTPUTS,
-                )
-
-            result = outputs
-
-            self.state.chat_history.append(
-                ChatMessage(
-                    role="FUNCTION",
-                    content=StringChatMessageContent(value=json.dumps(result, cls=DefaultStateEncoder)),
-                )
-            )
-
-            return self.Outputs()
-
-        # Create BaseNode for workflow functions
         node = type(
-            f"
-            (
+            f"DynamicInlineSubworkflowNode_{function.__name__}",
+            (DynamicInlineSubworkflowNode,),
             {
-                "
+                "subworkflow": function,
+                "function_call_output": tool_router_node.Outputs.results,
                 "__module__": __name__,
             },
         )
     else:
-        # For regular functions,
-        def execute_regular_function(self) -> BaseNode.Outputs:
-            # Get the function call from the tool router output
-            function_call_output = self.state.meta.node_outputs.get(tool_router_node.Outputs.results)
-            if function_call_output and len(function_call_output) > 0:
-                function_call = function_call_output[0]
-                arguments = function_call.value.arguments
-            else:
-                arguments = {}
-
-            # Call the function directly
-            try:
-                result = function(**arguments)
-            except Exception as e:
-                raise NodeException(
-                    message=f"Error executing function '{function.__name__}': {str(e)}",
-                    code=WorkflowErrorCode.NODE_EXECUTION,
-                )
-
-            # Add the result to the chat history
-            self.state.chat_history.append(
-                ChatMessage(
-                    role="FUNCTION",
-                    content=StringChatMessageContent(value=json.dumps(result, cls=DefaultStateEncoder)),
-                )
-            )
-
-            return self.Outputs()
-
-        # Create BaseNode for regular functions
+        # For regular functions, use FunctionNode
         node = type(
            f"FunctionNode_{function.__name__}",
            (FunctionNode,),
            {
-                "
+                "function_definition": lambda self, **kwargs: function(**kwargs),
+                "function_call_output": tool_router_node.Outputs.results,
                 "__module__": __name__,
             },
         )
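The refactor above replaces the per-branch closures in create_function_node with three reusable node classes (DynamicSubworkflowDeploymentNode, DynamicInlineSubworkflowNode, FunctionNode) that are subclassed dynamically via type(). For plain Python tools, a minimal sketch of that pattern follows; FunctionNode is the class added in this diff, while my_add, build_my_add_node, and router_cls are illustrative names standing in for a user-defined tool and the class returned by create_tool_router_node.

from vellum.workflows.nodes.displayable.tool_calling_node.utils import FunctionNode


def my_add(a: int, b: int) -> int:
    # Hypothetical tool function, used only for illustration.
    return a + b


def build_my_add_node(router_cls):
    # Mirrors the else-branch of create_function_node: bind the callable and the
    # router's results output as class attributes on a FunctionNode subclass.
    return type(
        f"FunctionNode_{my_add.__name__}",
        (FunctionNode,),
        {
            # The lambda drops the implicit `self` so the tool keeps its plain signature.
            "function_definition": lambda self, **kwargs: my_add(**kwargs),
            # Resolved at run time against the router's latest prompt outputs.
            "function_call_output": router_cls.Outputs.results,
            "__module__": __name__,
        },
    )

At run time, FunctionNode.run() reads the first FUNCTION_CALL entry from function_call_output, invokes the bound callable with its arguments, and appends the JSON-serialized result to state.chat_history as a FUNCTION message.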
vellum/workflows/nodes/experimental/__init__.py
CHANGED
@@ -1,3 +1,3 @@
-from
+from .openai_chat_completion_node import OpenAIChatCompletionNode

-__all__ = ["
+__all__ = ["OpenAIChatCompletionNode"]
vellum/workflows/state/base.py
CHANGED
@@ -27,7 +27,7 @@ from typing_extensions import dataclass_transform
 from pydantic import GetCoreSchemaHandler, ValidationInfo, field_serializer, field_validator
 from pydantic_core import core_schema

-from vellum.core.pydantic_utilities import UniversalBaseModel
+from vellum.client.core.pydantic_utilities import UniversalBaseModel
 from vellum.utils.uuid import is_valid_uuid
 from vellum.workflows.constants import undefined
 from vellum.workflows.inputs.base import BaseInputs
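The only change to this file is the import path for UniversalBaseModel, which now lives under vellum.client.core rather than vellum.core. A minimal sketch of downstream code written against the 1.0.0 path follows; MyConfig and its field are hypothetical, and this assumes UniversalBaseModel still behaves as a pydantic-style base model (the diff itself only shows the import move).

# Hypothetical downstream usage of the 1.0.0 import path.
from vellum.client.core.pydantic_utilities import UniversalBaseModel


class MyConfig(UniversalBaseModel):
    # Illustrative field; not part of the vellum package.
    name: str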
vellum/workflows/state/delta.py
CHANGED