vellum-ai 0.14.86__py3-none-any.whl → 0.14.88__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -16,10 +16,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "vellum-ai/0.14.86",
+            "User-Agent": "vellum-ai/0.14.88",
            "X-Fern-Language": "Python",
            "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.14.86",
+            "X-Fern-SDK-Version": "0.14.88",
        }
        headers["X-API-KEY"] = self.api_key
        return headers
@@ -2,4 +2,4 @@
 
 import typing
 
-ApiVersionEnum = typing.Literal["2024-10-25"]
+ApiVersionEnum = typing.Union[typing.Literal["2024-10-25", "2025-07-30"], typing.Any]
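A minimal sketch of why the widened `ApiVersionEnum` matters for callers: the new `"2025-07-30"` literal keeps autocomplete and exhaustiveness checks for known versions, while the `Union[..., typing.Any]` arm lets code pass newer, not-yet-enumerated version strings without static type errors. The `pin_api_version` helper and the future date below are illustrative only, not part of the SDK.

```python
import typing

# Mirrors the widened type from the hunk: known literals plus an Any escape hatch.
ApiVersionEnum = typing.Union[typing.Literal["2024-10-25", "2025-07-30"], typing.Any]


def pin_api_version(version: ApiVersionEnum) -> str:
    """Hypothetical helper: normalize the chosen API version to a string."""
    return str(version)


pin_api_version("2025-07-30")  # literal added in this release
pin_api_version("2026-01-01")  # hypothetical future version; still type-checks via the Any arm
```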
@@ -19,7 +19,7 @@ class ExecutionThinkingVellumValue(UniversalBaseModel):
 
     name: str
     type: typing.Literal["THINKING"] = "THINKING"
-    value: typing.List[StringVellumValue]
+    value: StringVellumValue
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
@@ -13,7 +13,7 @@ class ThinkingVellumValue(UniversalBaseModel):
     """
 
     type: typing.Literal["THINKING"] = "THINKING"
-    value: typing.List[StringVellumValue]
+    value: StringVellumValue
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
@@ -13,7 +13,7 @@ class ThinkingVellumValueRequest(UniversalBaseModel):
     """
 
     type: typing.Literal["THINKING"] = "THINKING"
-    value: typing.List[StringVellumValueRequest]
+    value: StringVellumValueRequest
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
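Taken together, the three hunks above change the THINKING payload from a list of string values to a single value, so downstream code that built or unpacked the list needs a one-line update. A hedged before/after sketch, assuming only the fields shown in the diff and that `StringVellumValue` takes its text via `value=` as it does elsewhere in this release:

```python
from vellum.client.types.string_vellum_value import StringVellumValue
from vellum.client.types.thinking_vellum_value import ThinkingVellumValue

# Before 0.14.88 the field was a list of string values:
#   ThinkingVellumValue(value=[StringVellumValue(value="step-by-step reasoning")])
# From 0.14.88 on it is a single value:
thinking = ThinkingVellumValue(value=StringVellumValue(value="step-by-step reasoning"))

# Consumers read the text directly instead of iterating over a list.
print(thinking.value.value)  # "step-by-step reasoning"
```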
@@ -33,6 +33,7 @@ class WorkflowEventExecutionRead(UniversalBaseModel):
     metric_results: typing.List[WorkflowExecutionViewOnlineEvalMetricResult]
     usage_results: typing.Optional[typing.List[WorkflowExecutionUsageResult]] = None
     spans: typing.List[VellumSpan]
+    state: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
@@ -103,7 +103,7 @@ def is_unresolved(value: Any) -> bool:
         return any(is_unresolved(getattr(value, field.name)) for field in dataclasses.fields(value))
 
     if isinstance(value, BaseModel):
-        return any(is_unresolved(getattr(value, key)) for key in value.model_fields.keys())
+        return any(is_unresolved(getattr(value, key)) for key in value.__class__.model_fields.keys())
 
     if isinstance(value, Mapping):
         return any(is_unresolved(item) for item in value.values())
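This hunk and the `_compile_default_value` change later in the diff both switch from `instance.model_fields` to `instance.__class__.model_fields`. Recent Pydantic releases (v2.11) deprecated reading `model_fields` off a model instance in favor of the class attribute, so the rewrite avoids a DeprecationWarning without changing behavior. A minimal sketch of the pattern, with an illustrative `Point` model that is not part of the SDK:

```python
from pydantic import BaseModel


class Point(BaseModel):
    x: int
    y: int


p = Point(x=1, y=2)

# Instance access (p.model_fields) is deprecated in newer Pydantic v2 releases;
# reading the mapping off the class yields the same keys without the warning.
field_names = list(p.__class__.model_fields.keys())
assert field_names == ["x", "y"]
```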
@@ -64,9 +64,8 @@ class InlinePromptNode(BaseInlinePromptNode[StateType]):
             elif output.type == "FUNCTION_CALL":
                 string_outputs.append(output.value.model_dump_json(indent=4))
             elif output.type == "THINKING":
-                for thinking_item in output.value:
-                    if thinking_item.type == "STRING":
-                        string_outputs.append(thinking_item.value)
+                if output.value.type == "STRING":
+                    string_outputs.append(output.value.value)
             else:
                 string_outputs.append(output.value.message)
 
@@ -66,9 +66,8 @@ class PromptDeploymentNode(BasePromptDeploymentNode[StateType]):
             elif output.type == "FUNCTION_CALL":
                 string_outputs.append(output.value.model_dump_json(indent=4))
             elif output.type == "THINKING":
-                for thinking_item in output.value:
-                    if thinking_item.type == "STRING":
-                        string_outputs.append(thinking_item.value)
+                if output.value.type == "STRING":
+                    string_outputs.append(output.value.value)
             else:
                 string_outputs.append(output.value.message)
 
@@ -1,13 +1,19 @@
 import json
-from typing import Any, List
+from uuid import uuid4
+from typing import Any, Iterator, List
 
 from vellum import ChatMessage
+from vellum.client.types.fulfilled_execute_prompt_event import FulfilledExecutePromptEvent
 from vellum.client.types.function_call import FunctionCall
 from vellum.client.types.function_call_vellum_value import FunctionCallVellumValue
+from vellum.client.types.initiated_execute_prompt_event import InitiatedExecutePromptEvent
 from vellum.client.types.string_chat_message_content import StringChatMessageContent
+from vellum.client.types.string_vellum_value import StringVellumValue
+from vellum.client.types.variable_prompt_block import VariablePromptBlock
 from vellum.workflows import BaseWorkflow
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases import BaseNode
+from vellum.workflows.nodes.displayable.tool_calling_node.node import ToolCallingNode
 from vellum.workflows.nodes.displayable.tool_calling_node.utils import create_function_node, create_tool_router_node
 from vellum.workflows.outputs.base import BaseOutputs
 from vellum.workflows.state.base import BaseState, StateMeta
@@ -140,3 +146,65 @@ def test_deployment_definition_release_tag_defaults_to_latest():
 
     # THEN the release_tag should default to "LATEST"
     assert deployment_config.release_tag == "LATEST"
+
+
+def test_tool_calling_node_with_user_provided_chat_history_block(vellum_adhoc_prompt_client):
+    """
+    Test that ToolCallingNode with user-provided chat history block merges user and node messages.
+    """
+
+    # GIVEN a ToolCallingNode with a user-provided chat history block
+    user_chat_history_block = VariablePromptBlock(
+        block_type="VARIABLE",
+        input_variable="chat_history",
+        state=None,
+        cache_config=None,
+    )
+
+    class TestToolCallingNode(ToolCallingNode):
+        ml_model = "gpt-4o-mini"
+        blocks = [user_chat_history_block]
+        functions = [first_function]
+        prompt_inputs = {"chat_history": [ChatMessage(role="USER", text="Hello from user")]}
+        max_prompt_iterations = 1
+
+    def generate_prompt_events(*args: Any, **kwargs: Any) -> Iterator[Any]:
+        execution_id = str(uuid4())
+        events = [
+            InitiatedExecutePromptEvent(execution_id=execution_id),
+            FulfilledExecutePromptEvent(
+                execution_id=execution_id,
+                outputs=[StringVellumValue(value="Hello! I can help you.")],
+            ),
+        ]
+        yield from events
+
+    vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.side_effect = generate_prompt_events
+
+    # AND a state
+    state = BaseState()
+
+    # WHEN the ToolCallingNode runs
+    node = TestToolCallingNode(state=state)
+    list(node.run())
+
+    # THEN the API should be called with the correct blocks
+    mock_api = vellum_adhoc_prompt_client.adhoc_execute_prompt_stream
+    assert mock_api.call_count >= 1
+
+    # AND the blocks should include the user-provided chat_history block
+    call_kwargs = mock_api.call_args.kwargs
+    blocks = call_kwargs["blocks"]
+
+    chat_history_blocks = [
+        block for block in blocks if block.block_type == "VARIABLE" and block.input_variable == "chat_history"
+    ]
+    assert len(chat_history_blocks) == 1
+
+    # AND the input_values should include the user's chat history
+    input_values = call_kwargs["input_values"]
+    chat_history_inputs = [
+        input_val for input_val in input_values if hasattr(input_val, "key") and input_val.key == "chat_history"
+    ]
+    assert len(chat_history_inputs) == 1
+    assert chat_history_inputs[0].value == [ChatMessage(role="USER", text="Hello from user")]
@@ -8,12 +8,14 @@ from vellum.client.types.function_call_chat_message_content import FunctionCallC
 from vellum.client.types.function_call_chat_message_content_value import FunctionCallChatMessageContentValue
 from vellum.client.types.string_chat_message_content import StringChatMessageContent
 from vellum.client.types.variable_prompt_block import VariablePromptBlock
+from vellum.workflows.context import execution_context, get_parent_context
 from vellum.workflows.errors.types import WorkflowErrorCode
+from vellum.workflows.events.workflow import is_workflow_event
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode
 from vellum.workflows.nodes.displayable.subworkflow_deployment_node.node import SubworkflowDeploymentNode
-from vellum.workflows.outputs.base import BaseOutput
+from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
 from vellum.workflows.ports.port import Port
 from vellum.workflows.references.lazy import LazyReference
 from vellum.workflows.state.context import WorkflowContext
@@ -21,6 +23,9 @@ from vellum.workflows.state.encoder import DefaultStateEncoder
 from vellum.workflows.types.core import EntityInputsInterface, MergeBehavior, Tool
 from vellum.workflows.types.definition import DeploymentDefinition
 from vellum.workflows.types.generics import is_workflow_class
+from vellum.workflows.workflows.event_filters import all_workflow_event_filter
+
+CHAT_HISTORY_VARIABLE = "chat_history"
 
 
 class FunctionNode(BaseNode):
@@ -40,7 +45,10 @@ class ToolRouterNode(InlinePromptNode):
             max_iterations_message = f"Maximum number of prompt iterations `{self.max_prompt_iterations}` reached."
             raise NodeException(message=max_iterations_message, code=WorkflowErrorCode.NODE_EXECUTION)
 
-        self.prompt_inputs = {**self.prompt_inputs, "chat_history": self.state.chat_history}  # type: ignore
+        # Merge user-provided chat history with node's chat history
+        user_chat_history = self.prompt_inputs.get(CHAT_HISTORY_VARIABLE, []) if self.prompt_inputs else []
+        merged_chat_history = user_chat_history + self.state.chat_history
+        self.prompt_inputs = {**self.prompt_inputs, CHAT_HISTORY_VARIABLE: merged_chat_history}  # type: ignore
         generator = super().run()
         for output in generator:
             if output.name == "results" and output.value:
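The merge above is a plain list concatenation, so any chat history the caller supplied through `prompt_inputs` leads and the node's accumulated `state.chat_history` follows. A standalone sketch of that ordering; the variable names are illustrative, not SDK API:

```python
from vellum import ChatMessage

# Assumed inputs: what the user supplied via prompt_inputs["chat_history"] ...
user_chat_history = [ChatMessage(role="USER", text="Hello from user")]
# ... and what the tool-calling loop has accumulated so far in state.chat_history.
node_chat_history = [ChatMessage(role="ASSISTANT", text="Calling first_function...")]

# Same concatenation as the hunk: user-provided messages first, node messages after.
merged_chat_history = user_chat_history + node_chat_history
assert [m.text for m in merged_chat_history] == ["Hello from user", "Calling first_function..."]
```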
@@ -102,16 +110,21 @@ def create_tool_router_node(
         # If no functions exist, create a simple Ports class with just a default port
         Ports = type("Ports", (), {"default": Port(default=True)})
 
-    # Add a chat history block to blocks
-    blocks.append(
-        VariablePromptBlock(
-            block_type="VARIABLE",
-            input_variable="chat_history",
-            state=None,
-            cache_config=None,
-        )
+    # Add a chat history block to blocks only if one doesn't already exist
+    has_chat_history_block = any(
+        block.block_type == "VARIABLE" and block.input_variable == CHAT_HISTORY_VARIABLE for block in blocks
     )
 
+    if not has_chat_history_block:
+        blocks.append(
+            VariablePromptBlock(
+                block_type="VARIABLE",
+                input_variable=CHAT_HISTORY_VARIABLE,
+                state=None,
+                cache_config=None,
+            )
+        )
+
     node = cast(
         Type[ToolRouterNode],
         type(
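The guard above keeps `create_tool_router_node` from appending a second `chat_history` variable block when the caller already supplied one, which is exactly what the new test exercises. A small sketch of the dedupe check, assuming the optional `state` and `cache_config` fields default to `None` as the explicit `None` arguments in the hunk suggest:

```python
from vellum.client.types.variable_prompt_block import VariablePromptBlock

CHAT_HISTORY_VARIABLE = "chat_history"

# Assume the caller already provided their own chat_history block.
blocks = [VariablePromptBlock(block_type="VARIABLE", input_variable=CHAT_HISTORY_VARIABLE)]

has_chat_history_block = any(
    block.block_type == "VARIABLE" and block.input_variable == CHAT_HISTORY_VARIABLE for block in blocks
)
if not has_chat_history_block:
    blocks.append(VariablePromptBlock(block_type="VARIABLE", input_variable=CHAT_HISTORY_VARIABLE))

assert len(blocks) == 1  # no duplicate block is appended
```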
@@ -200,26 +213,60 @@ def create_function_node(
     elif is_workflow_class(function):
         # Create a class-level wrapper that calls the original function
         def execute_inline_workflow_function(self) -> BaseNode.Outputs:
-            outputs = self.state.meta.node_outputs.get(tool_router_node.Outputs.text)
-
-            outputs = json.loads(outputs)
-            arguments = outputs["arguments"]
+            function_call_output = self.state.meta.node_outputs.get(tool_router_node.Outputs.results)
+            if function_call_output and len(function_call_output) > 0:
+                function_call = function_call_output[0]
+                arguments = function_call.value.arguments
+            else:
+                arguments = {}
 
             # Call the function based on its type
             inputs_instance = function.get_inputs_class()(**arguments)
-            workflow = function(context=WorkflowContext.create_from(self._context))
-            terminal_event = workflow.run(
-                inputs=inputs_instance,
-            )
-            if terminal_event.name == "workflow.execution.paused":
+
+            with execution_context(parent_context=get_parent_context()):
+                workflow = function(
+                    parent_state=self.state,
+                    context=WorkflowContext.create_from(self._context),
+                )
+                subworkflow_stream = workflow.stream(
+                    inputs=inputs_instance,
+                    event_filter=all_workflow_event_filter,
+                    node_output_mocks=self._context._get_all_node_output_mocks(),
+                )
+
+            outputs: Optional[BaseOutputs] = None
+            exception: Optional[NodeException] = None
+
+            for event in subworkflow_stream:
+                self._context._emit_subworkflow_event(event)
+                if exception:
+                    continue
+
+                if not is_workflow_event(event):
+                    continue
+                if event.workflow_definition != function:
+                    continue
+
+                if event.name == "workflow.execution.fulfilled":
+                    outputs = event.outputs
+                elif event.name == "workflow.execution.rejected":
+                    exception = NodeException.of(event.error)
+                elif event.name == "workflow.execution.paused":
+                    exception = NodeException(
+                        code=WorkflowErrorCode.INVALID_OUTPUTS,
+                        message="Subworkflow unexpectedly paused",
+                    )
+
+            if exception:
+                raise exception
+
+            if outputs is None:
                 raise NodeException(
+                    message="Expected to receive outputs from inline subworkflow",
                     code=WorkflowErrorCode.INVALID_OUTPUTS,
-                    message="Subworkflow unexpectedly paused",
                 )
-            elif terminal_event.name == "workflow.execution.fulfilled":
-                result = terminal_event.outputs
-            elif terminal_event.name == "workflow.execution.rejected":
-                raise NodeException(message=terminal_event.error.message, code=terminal_event.error.code)
+
+            result = outputs
 
             self.state.chat_history.append(
                 ChatMessage(
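The rewrite above swaps `workflow.run()` for `workflow.stream(...)` so every subworkflow event can be re-emitted to the parent context before the terminal state is resolved: remember the first error, keep draining the stream, then raise or return. A simplified, self-contained sketch of that control flow using hypothetical event objects rather than the SDK's event classes (the real loop also filters by workflow definition and handles paused events, which this omits):

```python
from dataclasses import dataclass
from typing import Any, Callable, Iterator, Optional


@dataclass
class Event:
    # Hypothetical stand-in for a workflow event; the SDK's events carry more fields.
    name: str
    outputs: Any = None
    error: Optional[str] = None


def resolve_stream(stream: Iterator[Event], emit: Callable[[Event], None]) -> Any:
    """Drain the stream, forward every event, then surface the terminal result."""
    outputs: Any = None
    exception: Optional[Exception] = None

    for event in stream:
        emit(event)  # always forward, even after a failure has been recorded
        if exception:
            continue
        if event.name == "workflow.execution.fulfilled":
            outputs = event.outputs
        elif event.name == "workflow.execution.rejected":
            exception = RuntimeError(event.error)

    if exception:
        raise exception
    if outputs is None:
        raise RuntimeError("Expected to receive outputs from inline subworkflow")
    return outputs


events = [Event("workflow.execution.initiated"), Event("workflow.execution.fulfilled", outputs={"answer": 42})]
assert resolve_stream(iter(events), emit=lambda e: None) == {"answer": 42}
```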
@@ -82,7 +82,7 @@ def _compile_default_value(default: Any) -> Any:
     if isinstance(default, BaseModel):
         return {
             field_name: _compile_default_value(getattr(default, field_name))
-            for field_name in default.model_fields.keys()
+            for field_name in default.__class__.model_fields.keys()
         }
 
     return default
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 0.14.86
+Version: 0.14.88
 Summary:
 License: MIT
 Requires-Python: >=3.9,<4.0
@@ -144,7 +144,7 @@ vellum/client/README.md,sha256=47bNYmRLSISR1ING58kXXZ88nFLPGFv0bAspBtuXG3g,4306
 vellum/client/__init__.py,sha256=tKLc-F8I8_62RSZg7J7Lvo1dUQ_or7DGsDhbMyhWfGA,120958
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=K_siMZE7rIopQdqNiIxKekgGPV2iWv2OagS8NwMBP_8,1916
+vellum/client/core/client_wrapper.py,sha256=DiS34kxSaOHT-5h1HVeW53M67W2ZNWcKRBYI_HgjX_0,1916
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 vellum/client/core/http_client.py,sha256=cKs2w0ybDBk1wHQf-fTALm_MmvaMe3cZKcYJxqmCxkE,19539
@@ -223,7 +223,7 @@ vellum/client/types/add_openai_api_key_enum.py,sha256=GB7sLK_Ou7-Xn73sKJHUo6Gx3T
 vellum/client/types/api_node_result.py,sha256=3zAbNGNYY6EgJDzqmcIvLqq8wU-WoMEwGT1W1fjto6U,729
 vellum/client/types/api_node_result_data.py,sha256=qb0hMdyZvWnlqjfmzSf_AWyUYbBhwkXGoRXNtduSG0U,909
 vellum/client/types/api_request_parent_context.py,sha256=7137NWvk4Lyx4jKZU1GhQYBneuSe34ObCuY4h2jXhoc,1389
-vellum/client/types/api_version_enum.py,sha256=CfNOk0cJ_VfIlKwD4ORH-v8z0KHP3u0Xolu_8ze86To,126
+vellum/client/types/api_version_enum.py,sha256=PObXD86PDEuoxBpLg2pIv_xEeHbN99r2tgAKnCiu5hk,166
 vellum/client/types/array_chat_message_content.py,sha256=353TDzStNXA2dQETvnJrazCr33nlFx7hgvvPV526ECg,780
 vellum/client/types/array_chat_message_content_item.py,sha256=udxHZTZLQ1Ekg6X1D616a61crYxlQDz8QU6-CAHznrQ,625
 vellum/client/types/array_chat_message_content_item_request.py,sha256=3ULz8jtWvJIGl2SaGfw_hX9sKJxZaM4uUrZXyORlMhQ,742
@@ -330,7 +330,7 @@ vellum/client/types/execution_json_vellum_value.py,sha256=oGY3CsJBKeOuEexmITfRYc
 vellum/client/types/execution_number_vellum_value.py,sha256=b2TpqyafRHCdl6EhgctNgUSLU-JBdouU6OgM8Jk_O78,809
 vellum/client/types/execution_search_results_vellum_value.py,sha256=HkxoXaUF6pMbfXd5wLk5VKmcXed2IRfEzkxsoGpwmg0,898
 vellum/client/types/execution_string_vellum_value.py,sha256=4w0ottwB5F2NL3uEXBBggP7XkcdE_D2lGmEobkXWY7o,807
-vellum/client/types/execution_thinking_vellum_value.py,sha256=n66gPYgiDpOGhOniQlRE6jDUOBnPkHnfOvz8RVyzw1g,879
+vellum/client/types/execution_thinking_vellum_value.py,sha256=5WGuCVue3OMAdM881fDUFGjwOuWhImT6ALQq1qHyTks,866
 vellum/client/types/execution_vellum_value.py,sha256=gJ4UWA4KKzWGJZpEZGQV8Efqh27PmyWz6RZSsbtNux8,1088
 vellum/client/types/external_input_descriptor.py,sha256=ErOW2OfFMz1FDGmVY6NgiUBPsleaWhdJBekwFp4ru7o,805
 vellum/client/types/external_test_case_execution.py,sha256=TkO1CQcEI8LA7sdYJfAqhbdkj27sXEkF8VL7zBeDBM4,877
@@ -699,8 +699,8 @@ vellum/client/types/test_suite_test_case_replace_bulk_operation_request.py,sha25
 vellum/client/types/test_suite_test_case_replaced_bulk_result.py,sha256=BIlXI7udygWrwtyRhCl8hmExHbkAl9lI8s3sm1G5iGc,1019
 vellum/client/types/test_suite_test_case_replaced_bulk_result_data.py,sha256=ORmcUvwzvRLRaoFhxdXFIKzPxOI6PU1kESl0R6rsJuw,651
 vellum/client/types/test_suite_test_case_upsert_bulk_operation_request.py,sha256=PrKuqePiXBQv6iLAxsk4xQg29KGdOlqMDhIVdGNxuz4,1071
-vellum/client/types/thinking_vellum_value.py,sha256=XLeSzCwbO9kVek6zdMn6pQChLKDYFeyabE9OiFeoq_I,755
-vellum/client/types/thinking_vellum_value_request.py,sha256=n6FqEZs5TKhhNZqe9yau_Vcn55ktXxFQO5sWa0aRNbg,784
+vellum/client/types/thinking_vellum_value.py,sha256=q6IZFZrAXkoYCQOfHLLKWCTYj_zW8HJIzovO6IlzY-A,742
+vellum/client/types/thinking_vellum_value_request.py,sha256=dHxjlH_6nxRjcixAudmvwwOMkWc4JmLHfRWKK4rGssA,771
 vellum/client/types/token_overlapping_window_chunker_config.py,sha256=_8vR9AiZQmb5OA3OojbjuTOGiGNTS9EY0vXrmej_TM0,731
 vellum/client/types/token_overlapping_window_chunker_config_request.py,sha256=O58w5om6EsCgZeqM7n3KSzwo1PqINyHWln46EFW4Inc,738
 vellum/client/types/token_overlapping_window_chunking.py,sha256=TghiPKWZg3Eg_UzGI9VmjQgVPZFABrnhfsz4iPLEem8,889
@@ -742,7 +742,7 @@ vellum/client/types/workflow_deployment_release_workflow_deployment.py,sha256=ir
 vellum/client/types/workflow_deployment_release_workflow_version.py,sha256=V1Eb3goBX2lle851LkhR1tbCFa0z_O-yhMuQWCN6c-g,773
 vellum/client/types/workflow_error.py,sha256=EQajkEmLS64T0wYm0goHQl0rT7Lguurk8pLwkhjsgAI,282
 vellum/client/types/workflow_event_error.py,sha256=HIewu_kh3KNPpWegAQArvAGHCp-cBIXqlUAAc_dBZhc,687
-vellum/client/types/workflow_event_execution_read.py,sha256=TQaBs2ZkOOJOjCkdmgI9ZX7c4XgIaNIBozCmZlOoZp8,2008
+vellum/client/types/workflow_event_execution_read.py,sha256=a4Nv1UmNlMqDPqGrZLpyy3nNRtc1UqXmzXkeApSfstY,2089
 vellum/client/types/workflow_execution_actual.py,sha256=YL5WL4O4CyaZWSrxqpE4chJ28EJlyScj5JeaLttegEg,843
 vellum/client/types/workflow_execution_actual_chat_history_request.py,sha256=L6U8tgM7SiU4qGJMZChFzj6HfHgO-YAlTXfbT7ZIaE4,1993
 vellum/client/types/workflow_execution_actual_json_request.py,sha256=5QYaPCSOwFnjH_kTrB2bTznTMFExSZdBhTkmelf1h4Q,1931
@@ -1523,7 +1523,7 @@ vellum/workflows/descriptors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
 vellum/workflows/descriptors/base.py,sha256=X47a4TClHknsnjs53DkiXnop_5uLGVor487oxhHuGo4,14902
 vellum/workflows/descriptors/exceptions.py,sha256=gUy4UD9JFUKSeQnQpeuDSLiRqWjWiIsxLahB7p_q3JY,54
 vellum/workflows/descriptors/tests/test_utils.py,sha256=HJ5DoRz0sJvViGxyZ_FtytZjxN2J8xTkGtaVwCy6Q90,6928
-vellum/workflows/descriptors/utils.py,sha256=gmVXJjf2yWmvlYey41J2FZHeSou0JuCHKb3826K_Jok,3838
+vellum/workflows/descriptors/utils.py,sha256=1siECBf6AI54gwwUwkF6mP9rYsRryUGaOYBbMpQaceM,3848
 vellum/workflows/edges/__init__.py,sha256=wSkmAnz9xyi4vZwtDbKxwlplt2skD7n3NsxkvR_pUus,50
 vellum/workflows/edges/edge.py,sha256=N0SnY3gKVuxImPAdCbPMPlHJIXbkQ3fwq_LbJRvVMFc,677
 vellum/workflows/emitters/__init__.py,sha256=YyOgaoLtVW8eFNEWODzCYb0HzL0PoSeNRf4diJ1Y0dk,80
@@ -1654,7 +1654,7 @@ vellum/workflows/nodes/displayable/guardrail_node/test_node.py,sha256=SAGv6hSFcB
 vellum/workflows/nodes/displayable/guardrail_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/guardrail_node/tests/test_node.py,sha256=X2pd6TI8miYxIa7rgvs1pHTEreyWcf77EyR0_Jsa700,2055
 vellum/workflows/nodes/displayable/inline_prompt_node/__init__.py,sha256=gSUOoEZLlrx35-tQhSAd3An8WDwBqyiQh-sIebLU9wU,74
-vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=pj1tjx3XfiGQz9a6x3xsK1DR7v1XeVHM93AKVi-GgRQ,3060
+vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=LkFaS7GDPdhqMjQ3duHPX6pjl0z6xKzGxDueQC4aeA0,2999
 vellum/workflows/nodes/displayable/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py,sha256=bBHs90mV5SZ3rJPAL0wx4WWyawUA406LgMPOdvpZC_A,10923
 vellum/workflows/nodes/displayable/merge_node/__init__.py,sha256=J8IC08dSH7P76wKlNuxe1sn7toNGtSQdFirUbtPDEs0,60
@@ -1662,7 +1662,7 @@ vellum/workflows/nodes/displayable/merge_node/node.py,sha256=nZtGGVAvY4fvGg8vwV6
 vellum/workflows/nodes/displayable/note_node/__init__.py,sha256=KWA3P4fyYJ-fOTky8qNGlcOotQ-HeHJ9AjZt6mRQmCE,58
 vellum/workflows/nodes/displayable/note_node/node.py,sha256=sIN1VBQ7zeT3GhN0kupXbFfdpvgedWV79k4woJNp5IQ,394
 vellum/workflows/nodes/displayable/prompt_deployment_node/__init__.py,sha256=krX1Hds-TSVYZsx0wJFX4wsAKkEFYOX1ifwRGiIM-EA,82
-vellum/workflows/nodes/displayable/prompt_deployment_node/node.py,sha256=d71QJmtozr06z9tX_7IZHnDUFBGg05YmQ7USu1XEs0M,3460
+vellum/workflows/nodes/displayable/prompt_deployment_node/node.py,sha256=rRUIM-zbVCV_0odyPExEZay0k4VCjsYyZ3OC9ZpHQsc,3399
 vellum/workflows/nodes/displayable/prompt_deployment_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py,sha256=c_nuuqrwiIjgj4qIbVypfDuOc-3TlgO6CbXFqQl2Nqw,19725
 vellum/workflows/nodes/displayable/search_node/__init__.py,sha256=hpBpvbrDYf43DElRZFLzieSn8weXiwNiiNOJurERQbs,62
@@ -1681,9 +1681,9 @@ vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha
 vellum/workflows/nodes/displayable/tool_calling_node/__init__.py,sha256=3n0-ysmFKsr40CVxPthc0rfJgqVJeZuUEsCmYudLVRg,117
 vellum/workflows/nodes/displayable/tool_calling_node/node.py,sha256=7MqEtw-RejpJ5Uer11tIFKRtklC4DfiWVx2-tpnA1Gg,6310
 vellum/workflows/nodes/displayable/tool_calling_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=ZWxicmEpBUAIudqUcKi1Li1f-Z2thYq8xI_Kf6YstMg,5077
+vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=3cCCIz-quneCVl3w8XnGxzyHx-nn_jGTKCbSH9jkYT0,7824
 vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py,sha256=eu6WTyENhGLg9pGp_j69rysZjf_qiQXske1YdZn9PzU,1718
-vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=f2hCLciTqjHXj9tVdnnTAVY0BLUCMBoH0emQeOJCB7I,11309
+vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=fYlfv0UZFgJ8A6wv586NawOztrA-6SFsZ4WyxhZFxl8,13292
 vellum/workflows/nodes/experimental/README.md,sha256=eF6DfIL8t-HbF9-mcofOMymKrraiBHDLKTlnBa51ZiE,284
 vellum/workflows/nodes/experimental/__init__.py,sha256=k7VQEyvgEdnrEZ-icXx3fiByPnyMOnMXNGGuqacyyik,91
 vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
@@ -1741,7 +1741,7 @@ vellum/workflows/types/tests/test_definition.py,sha256=5wh_WEnE51epkoo-4PE-JbPlg
 vellum/workflows/types/tests/test_utils.py,sha256=UnZog59tR577mVwqZRqqWn2fScoOU1H6up0EzS8zYhw,2536
 vellum/workflows/types/utils.py,sha256=mTctHITBybpt4855x32oCKALBEcMNLn-9cCmfEKgJHQ,6498
 vellum/workflows/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/utils/functions.py,sha256=ZN0rrIBF4R_KNt1CbRPVNGR36xEMUa1T7FkgZioou-Y,7185
+vellum/workflows/utils/functions.py,sha256=Gi08SYaTfLF04slbY_YcfP5erIMwtFgtYa59vhWez9k,7195
 vellum/workflows/utils/names.py,sha256=QLUqfJ1tmSEeUwBKTTiv_Qk3QGbInC2RSmlXfGXc8Wo,380
 vellum/workflows/utils/pydantic_schema.py,sha256=eR_bBtY-T0pttJP-ARwagSdCOnwPUtiT3cegm2lzDTQ,1310
 vellum/workflows/utils/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1758,8 +1758,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
 vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/workflows/tests/test_base_workflow.py,sha256=ptMntHzVyy8ZuzNgeTuk7hREgKQ5UBdgq8VJFSGaW4Y,20832
 vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
-vellum_ai-0.14.86.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
-vellum_ai-0.14.86.dist-info/METADATA,sha256=IvjzAyM7NKtDy7yBw7058E6f13UfiQ02VV0rR87ts3o,5556
-vellum_ai-0.14.86.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-vellum_ai-0.14.86.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
-vellum_ai-0.14.86.dist-info/RECORD,,
+vellum_ai-0.14.88.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.14.88.dist-info/METADATA,sha256=LkjU4RYrX6PB9kecW9GnNtnIvCpBmvP_8H4r34jJesY,5556
+vellum_ai-0.14.88.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.14.88.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.14.88.dist-info/RECORD,,