vellum-ai 0.14.89__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. vellum/__init__.py +7 -1
  2. vellum/client/__init__.py +2 -2
  3. vellum/client/core/client_wrapper.py +2 -2
  4. vellum/client/types/__init__.py +6 -0
  5. vellum/client/types/organization_limit_config.py +24 -0
  6. vellum/client/types/organization_read.py +2 -0
  7. vellum/client/types/quota.py +21 -0
  8. vellum/client/types/vembda_service_tier_enum.py +5 -0
  9. vellum/client/types/workflow_execution_actual.py +1 -1
  10. vellum/evaluations/resources.py +5 -5
  11. vellum/types/organization_limit_config.py +3 -0
  12. vellum/types/quota.py +3 -0
  13. vellum/types/vembda_service_tier_enum.py +3 -0
  14. vellum/workflows/events/node.py +1 -1
  15. vellum/workflows/events/types.py +1 -1
  16. vellum/workflows/events/workflow.py +1 -1
  17. vellum/workflows/nodes/bases/tests/test_base_node.py +1 -1
  18. vellum/workflows/nodes/displayable/api_node/node.py +1 -0
  19. vellum/workflows/nodes/displayable/api_node/tests/test_api_node.py +30 -0
  20. vellum/workflows/nodes/displayable/bases/api_node/node.py +27 -10
  21. vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +1 -1
  22. vellum/workflows/nodes/displayable/bases/search_node.py +1 -1
  23. vellum/workflows/nodes/displayable/code_execution_node/node.py +1 -1
  24. vellum/workflows/nodes/displayable/guardrail_node/node.py +1 -1
  25. vellum/workflows/nodes/displayable/subworkflow_deployment_node/node.py +1 -1
  26. vellum/workflows/nodes/displayable/tool_calling_node/node.py +5 -0
  27. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py +3 -0
  28. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +86 -114
  29. vellum/workflows/nodes/experimental/__init__.py +2 -2
  30. vellum/workflows/state/base.py +1 -1
  31. vellum/workflows/state/delta.py +1 -1
  32. {vellum_ai-0.14.89.dist-info → vellum_ai-1.0.1.dist-info}/METADATA +1 -1
  33. {vellum_ai-0.14.89.dist-info → vellum_ai-1.0.1.dist-info}/RECORD +59 -55
  34. vellum_cli/config.py +1 -1
  35. vellum_cli/push.py +1 -1
  36. vellum_cli/tests/test_ping.py +1 -0
  37. vellum_ee/workflows/display/base.py +0 -54
  38. vellum_ee/workflows/display/nodes/base_node_display.py +24 -0
  39. vellum_ee/workflows/display/nodes/vellum/api_node.py +20 -1
  40. vellum_ee/workflows/display/nodes/vellum/conditional_node.py +16 -2
  41. vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +0 -26
  42. vellum_ee/workflows/display/nodes/vellum/tests/test_api_node.py +31 -0
  43. vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py +3 -3
  44. vellum_ee/workflows/display/tests/test_base_workflow_display.py +2 -4
  45. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/conftest.py +2 -2
  46. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_api_node_serialization.py +10 -0
  47. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_conditional_node_serialization.py +11 -11
  48. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py +21 -0
  49. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py +21 -0
  50. vellum_ee/workflows/display/vellum.py +2 -128
  51. vellum_ee/workflows/display/workflows/__init__.py +0 -1
  52. vellum_ee/workflows/display/workflows/base_workflow_display.py +15 -3
  53. vellum_ee/workflows/display/workflows/tests/test_workflow_display.py +127 -0
  54. vellum_ee/workflows/tests/local_workflow/display/nodes/final_output.py +1 -1
  55. vellum_ee/workflows/tests/local_workflow/display/nodes/templating_node.py +1 -1
  56. vellum_ee/workflows/tests/local_workflow/display/workflow.py +11 -14
  57. vellum/workflows/nodes/experimental/tool_calling_node/__init__.py +0 -3
  58. vellum/workflows/nodes/experimental/tool_calling_node/node.py +0 -3
  59. vellum_ee/workflows/display/workflows/vellum_workflow_display.py +0 -9
  60. {vellum_ai-0.14.89.dist-info → vellum_ai-1.0.1.dist-info}/LICENSE +0 -0
  61. {vellum_ai-0.14.89.dist-info → vellum_ai-1.0.1.dist-info}/WHEEL +0 -0
  62. {vellum_ai-0.14.89.dist-info → vellum_ai-1.0.1.dist-info}/entry_points.txt +0 -0
vellum/workflows/nodes/displayable/tool_calling_node/utils.py
@@ -1,5 +1,5 @@
  import json
- from typing import Any, Iterator, List, Optional, Type, cast
+ from typing import Any, Callable, Iterator, List, Optional, Type, cast
 
  from pydash import snake_case
 
@@ -7,35 +7,29 @@ from vellum import ChatMessage, PromptBlock
  from vellum.client.types.function_call_chat_message_content import FunctionCallChatMessageContent
  from vellum.client.types.function_call_chat_message_content_value import FunctionCallChatMessageContentValue
  from vellum.client.types.prompt_output import PromptOutput
+ from vellum.client.types.prompt_parameters import PromptParameters
  from vellum.client.types.string_chat_message_content import StringChatMessageContent
  from vellum.client.types.variable_prompt_block import VariablePromptBlock
- from vellum.workflows.context import execution_context, get_parent_context
  from vellum.workflows.errors.types import WorkflowErrorCode
- from vellum.workflows.events.workflow import is_workflow_event
  from vellum.workflows.exceptions import NodeException
+ from vellum.workflows.inputs import BaseInputs
  from vellum.workflows.nodes.bases import BaseNode
+ from vellum.workflows.nodes.core.inline_subworkflow_node.node import InlineSubworkflowNode
  from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode
  from vellum.workflows.nodes.displayable.subworkflow_deployment_node.node import SubworkflowDeploymentNode
  from vellum.workflows.nodes.displayable.tool_calling_node.state import ToolCallingState
- from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
+ from vellum.workflows.outputs.base import BaseOutput
  from vellum.workflows.ports.port import Port
  from vellum.workflows.references.lazy import LazyReference
- from vellum.workflows.state.context import WorkflowContext
+ from vellum.workflows.state import BaseState
  from vellum.workflows.state.encoder import DefaultStateEncoder
  from vellum.workflows.types.core import EntityInputsInterface, MergeBehavior, Tool
  from vellum.workflows.types.definition import DeploymentDefinition
  from vellum.workflows.types.generics import is_workflow_class
- from vellum.workflows.workflows.event_filters import all_workflow_event_filter
 
  CHAT_HISTORY_VARIABLE = "chat_history"
 
 
- class FunctionNode(BaseNode):
-     """Node that executes a specific function."""
- 
-     pass
- 
- 
  class ToolRouterNode(InlinePromptNode[ToolCallingState]):
      max_prompt_iterations: Optional[int] = 5
 
@@ -120,11 +114,82 @@ class DynamicSubworkflowDeploymentNode(SubworkflowDeploymentNode[ToolCallingStat
  )
 
 
+ class DynamicInlineSubworkflowNode(InlineSubworkflowNode[ToolCallingState, BaseInputs, BaseState]):
+     """Node that executes an inline subworkflow with function call output."""
+ 
+     function_call_output: List[PromptOutput]
+ 
+     def run(self) -> Iterator[BaseOutput]:
+         if self.function_call_output and len(self.function_call_output) > 0:
+             function_call = self.function_call_output[0]
+             if function_call.type == "FUNCTION_CALL" and function_call.value is not None:
+                 arguments = function_call.value.arguments
+             else:
+                 arguments = {}
+         else:
+             arguments = {}
+ 
+         self.subworkflow_inputs = arguments  # type: ignore[misc]
+ 
+         # Call the parent run method to execute the subworkflow with proper streaming
+         outputs = {}
+ 
+         for output in super().run():
+             if output.is_fulfilled:
+                 outputs[output.name] = output.value
+             yield output
+ 
+         # Add the result to the chat history
+         self.state.chat_history.append(
+             ChatMessage(
+                 role="FUNCTION",
+                 content=StringChatMessageContent(value=json.dumps(outputs, cls=DefaultStateEncoder)),
+             )
+         )
+ 
+ 
+ class FunctionNode(BaseNode[ToolCallingState]):
+     """Node that executes a regular Python function with function call output."""
+ 
+     function_call_output: List[PromptOutput]
+     function_definition: Callable[..., Any]
+ 
+     def run(self) -> Iterator[BaseOutput]:
+         if self.function_call_output and len(self.function_call_output) > 0:
+             function_call = self.function_call_output[0]
+             if function_call.type == "FUNCTION_CALL" and function_call.value is not None:
+                 arguments = function_call.value.arguments
+             else:
+                 arguments = {}
+         else:
+             arguments = {}
+ 
+         try:
+             result = self.function_definition(**arguments)
+         except Exception as e:
+             function_name = self.function_definition.__name__
+             raise NodeException(
+                 message=f"Error executing function '{function_name}': {str(e)}",
+                 code=WorkflowErrorCode.NODE_EXECUTION,
+             )
+ 
+         # Add the result to the chat history
+         self.state.chat_history.append(
+             ChatMessage(
+                 role="FUNCTION",
+                 content=StringChatMessageContent(value=json.dumps(result, cls=DefaultStateEncoder)),
+             )
+         )
+ 
+         yield from []
+ 
+ 
  def create_tool_router_node(
      ml_model: str,
      blocks: List[PromptBlock],
      functions: List[Tool],
      prompt_inputs: Optional[EntityInputsInterface],
+     parameters: PromptParameters,
      max_prompt_iterations: Optional[int] = None,
  ) -> Type[ToolRouterNode]:
      if functions and len(functions) > 0:
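
For orientation, the hunk above makes `parameters` a new required argument of `create_tool_router_node`. The sketch below shows how the updated factory might be invoked; it is not taken from this diff, and the model name, empty block list, and `PromptParameters` fields are illustrative assumptions only.

    from vellum.client.types.prompt_parameters import PromptParameters
    from vellum.workflows.nodes.displayable.tool_calling_node.utils import create_tool_router_node

    def get_weather(city: str) -> str:
        # Hypothetical tool function used only for this example.
        return f"Sunny in {city}"

    router_node_class = create_tool_router_node(
        ml_model="gpt-4o",                 # illustrative model name
        blocks=[],                         # real PromptBlock instances would go here
        functions=[get_weather],
        prompt_inputs=None,
        parameters=PromptParameters(temperature=0.0),  # new required argument in 1.0.1
        max_prompt_iterations=5,
    )
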
@@ -179,6 +244,7 @@ def create_tool_router_node(
      "blocks": blocks,
      "functions": functions,
      "prompt_inputs": prompt_inputs,
+     "parameters": parameters,
      "max_prompt_iterations": max_prompt_iterations,
      "Ports": Ports,
      "__module__": __name__,
@@ -191,7 +257,7 @@ def create_tool_router_node(
  def create_function_node(
      function: Tool,
      tool_router_node: Type[ToolRouterNode],
- ) -> Type[FunctionNode]:
+ ) -> Type[BaseNode]:
      """
      Create a FunctionNode class for a given function.
 
@@ -220,117 +286,23 @@ def create_function_node(
          return node
 
      elif is_workflow_class(function):
-         # Create a class-level wrapper that calls the original function
-         def execute_inline_workflow_function(self) -> BaseNode.Outputs:
-             function_call_output = self.state.meta.node_outputs.get(tool_router_node.Outputs.results)
-             if function_call_output and len(function_call_output) > 0:
-                 function_call = function_call_output[0]
-                 arguments = function_call.value.arguments
-             else:
-                 arguments = {}
- 
-             # Call the function based on its type
-             inputs_instance = function.get_inputs_class()(**arguments)
- 
-             with execution_context(parent_context=get_parent_context()):
-                 workflow = function(
-                     parent_state=self.state,
-                     context=WorkflowContext.create_from(self._context),
-                 )
-                 subworkflow_stream = workflow.stream(
-                     inputs=inputs_instance,
-                     event_filter=all_workflow_event_filter,
-                     node_output_mocks=self._context._get_all_node_output_mocks(),
-                 )
- 
-             outputs: Optional[BaseOutputs] = None
-             exception: Optional[NodeException] = None
- 
-             for event in subworkflow_stream:
-                 self._context._emit_subworkflow_event(event)
-                 if exception:
-                     continue
- 
-                 if not is_workflow_event(event):
-                     continue
-                 if event.workflow_definition != function:
-                     continue
- 
-                 if event.name == "workflow.execution.fulfilled":
-                     outputs = event.outputs
-                 elif event.name == "workflow.execution.rejected":
-                     exception = NodeException.of(event.error)
-                 elif event.name == "workflow.execution.paused":
-                     exception = NodeException(
-                         code=WorkflowErrorCode.INVALID_OUTPUTS,
-                         message="Subworkflow unexpectedly paused",
-                     )
- 
-             if exception:
-                 raise exception
- 
-             if outputs is None:
-                 raise NodeException(
-                     message="Expected to receive outputs from inline subworkflow",
-                     code=WorkflowErrorCode.INVALID_OUTPUTS,
-                 )
- 
-             result = outputs
- 
-             self.state.chat_history.append(
-                 ChatMessage(
-                     role="FUNCTION",
-                     content=StringChatMessageContent(value=json.dumps(result, cls=DefaultStateEncoder)),
-                 )
-             )
- 
-             return self.Outputs()
- 
-         # Create BaseNode for workflow functions
          node = type(
-             f"InlineWorkflowNode_{function.__name__}",
-             (FunctionNode,),
+             f"DynamicInlineSubworkflowNode_{function.__name__}",
+             (DynamicInlineSubworkflowNode,),
              {
-                 "run": execute_inline_workflow_function,
+                 "subworkflow": function,
+                 "function_call_output": tool_router_node.Outputs.results,
                  "__module__": __name__,
              },
          )
      else:
-         # For regular functions, call them directly
-         def execute_regular_function(self) -> BaseNode.Outputs:
-             # Get the function call from the tool router output
-             function_call_output = self.state.meta.node_outputs.get(tool_router_node.Outputs.results)
-             if function_call_output and len(function_call_output) > 0:
-                 function_call = function_call_output[0]
-                 arguments = function_call.value.arguments
-             else:
-                 arguments = {}
- 
-             # Call the function directly
-             try:
-                 result = function(**arguments)
-             except Exception as e:
-                 raise NodeException(
-                     message=f"Error executing function '{function.__name__}': {str(e)}",
-                     code=WorkflowErrorCode.NODE_EXECUTION,
-                 )
- 
-             # Add the result to the chat history
-             self.state.chat_history.append(
-                 ChatMessage(
-                     role="FUNCTION",
-                     content=StringChatMessageContent(value=json.dumps(result, cls=DefaultStateEncoder)),
-                 )
-             )
- 
-             return self.Outputs()
- 
-         # Create BaseNode for regular functions
+         # For regular functions, use FunctionNode
          node = type(
              f"FunctionNode_{function.__name__}",
              (FunctionNode,),
              {
-                 "run": execute_regular_function,
+                 "function_definition": lambda self, **kwargs: function(**kwargs),
+                 "function_call_output": tool_router_node.Outputs.results,
                  "__module__": __name__,
              },
          )
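
Both branches above now rely on Python's three-argument `type()` to stamp out one node class per tool, binding the callable and the router output as class attributes instead of overriding `run` with a closure. The library-free sketch below (all names invented for illustration) shows why the `lambda self, **kwargs` shape works: a plain function stored on the class becomes a bound method, so `self` is supplied implicitly when the generated node calls it.

    # Library-free sketch of the type() pattern used by create_function_node above.
    class BaseToolNode:
        function_definition = None  # bound per generated subclass

        def run(self, **arguments):
            # Accessing the class attribute through the instance yields a bound
            # method, so the lambda's `self` parameter is filled in automatically.
            return self.function_definition(**arguments)

    def make_tool_node(fn):
        # Mirrors the diff: bind the callable via a lambda that accepts `self`.
        return type(
            f"FunctionNode_{fn.__name__}",
            (BaseToolNode,),
            {
                "function_definition": lambda self, **kwargs: fn(**kwargs),
                "__module__": __name__,
            },
        )

    def get_weather(city: str) -> str:
        return f"Sunny in {city}"

    WeatherNode = make_tool_node(get_weather)
    assert WeatherNode().run(city="Tokyo") == "Sunny in Tokyo"
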
vellum/workflows/nodes/experimental/__init__.py
@@ -1,3 +1,3 @@
- from ..displayable.tool_calling_node import ToolCallingNode
+ from .openai_chat_completion_node import OpenAIChatCompletionNode
 
- __all__ = ["ToolCallingNode"]
+ __all__ = ["OpenAIChatCompletionNode"]
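
Code that previously imported `ToolCallingNode` from the experimental package will no longer find it there. Based only on the relative import removed above, its non-experimental home appears to be the following module (verify against the 1.0.1 package layout):

    from vellum.workflows.nodes.displayable.tool_calling_node import ToolCallingNode
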
vellum/workflows/state/base.py
@@ -27,7 +27,7 @@ from typing_extensions import dataclass_transform
  from pydantic import GetCoreSchemaHandler, ValidationInfo, field_serializer, field_validator
  from pydantic_core import core_schema
 
- from vellum.core.pydantic_utilities import UniversalBaseModel
+ from vellum.client.core.pydantic_utilities import UniversalBaseModel
  from vellum.utils.uuid import is_valid_uuid
  from vellum.workflows.constants import undefined
  from vellum.workflows.inputs.base import BaseInputs
vellum/workflows/state/delta.py
@@ -1,6 +1,6 @@
  from typing import Any, Literal, Union
 
- from vellum.core.pydantic_utilities import UniversalBaseModel
+ from vellum.client.core.pydantic_utilities import UniversalBaseModel
 
 
  class BaseStateDelta(UniversalBaseModel):
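
Both state modules above move the `UniversalBaseModel` import from `vellum.core.pydantic_utilities` to `vellum.client.core.pydantic_utilities`. Downstream code that mirrored the SDK's internal import would change the same way; whether the old path still resolves in 1.0.1 is not shown by this diff. A minimal sketch with a hypothetical model:

    # New import path used internally by 1.0.1 (old path: vellum.core.pydantic_utilities).
    from vellum.client.core.pydantic_utilities import UniversalBaseModel

    class MyDelta(UniversalBaseModel):  # hypothetical downstream model
        name: str
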
{vellum_ai-0.14.89.dist-info → vellum_ai-1.0.1.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 0.14.89
+ Version: 1.0.1
  Summary:
  License: MIT
  Requires-Python: >=3.9,<4.0