vellum-ai 0.14.85__py3-none-any.whl → 0.14.87__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vellum/client/core/client_wrapper.py +2 -2
- vellum/client/types/api_version_enum.py +1 -1
- vellum/client/types/workflow_event_execution_read.py +1 -0
- vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py +69 -1
- vellum/workflows/nodes/displayable/tool_calling_node/utils.py +19 -9
- vellum/workflows/workflows/base.py +61 -1
- vellum/workflows/workflows/tests/test_base_workflow.py +19 -6
- {vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/METADATA +1 -1
- {vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/RECORD +12 -12
- {vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/LICENSE +0 -0
- {vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/WHEEL +0 -0
- {vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py

```diff
@@ -16,10 +16,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "vellum-ai/0.14.85",
+            "User-Agent": "vellum-ai/0.14.87",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.14.85",
+            "X-Fern-SDK-Version": "0.14.87",
         }
         headers["X-API-KEY"] = self.api_key
         return headers
```
vellum/client/types/workflow_event_execution_read.py

```diff
@@ -33,6 +33,7 @@ class WorkflowEventExecutionRead(UniversalBaseModel):
     metric_results: typing.List[WorkflowExecutionViewOnlineEvalMetricResult]
     usage_results: typing.Optional[typing.List[WorkflowExecutionUsageResult]] = None
     spans: typing.List[VellumSpan]
+    state: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
 
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
```
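The new `state` field is optional and defaults to `None`, so payloads that predate 0.14.87 still validate. A minimal sketch of that backward-compatibility pattern, using a hypothetical stand-in model (not the real `WorkflowEventExecutionRead`) and assuming Pydantic v2:

```python
import typing

import pydantic


class ExecutionReadSketch(pydantic.BaseModel):
    # Mirrors the relevant settings above: extra keys allowed, instances frozen.
    model_config = pydantic.ConfigDict(extra="allow", frozen=True)

    spans: typing.List[str]
    state: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None


# Older payloads that omit "state" still parse; newer payloads expose it.
old_payload = ExecutionReadSketch.model_validate({"spans": ["span-1"]})
new_payload = ExecutionReadSketch.model_validate({"spans": ["span-1"], "state": {"counter": 3}})
assert old_payload.state is None
assert new_payload.state == {"counter": 3}
```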
vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py

```diff
@@ -1,13 +1,19 @@
 import json
-from
+from uuid import uuid4
+from typing import Any, Iterator, List
 
 from vellum import ChatMessage
+from vellum.client.types.fulfilled_execute_prompt_event import FulfilledExecutePromptEvent
 from vellum.client.types.function_call import FunctionCall
 from vellum.client.types.function_call_vellum_value import FunctionCallVellumValue
+from vellum.client.types.initiated_execute_prompt_event import InitiatedExecutePromptEvent
 from vellum.client.types.string_chat_message_content import StringChatMessageContent
+from vellum.client.types.string_vellum_value import StringVellumValue
+from vellum.client.types.variable_prompt_block import VariablePromptBlock
 from vellum.workflows import BaseWorkflow
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases import BaseNode
+from vellum.workflows.nodes.displayable.tool_calling_node.node import ToolCallingNode
 from vellum.workflows.nodes.displayable.tool_calling_node.utils import create_function_node, create_tool_router_node
 from vellum.workflows.outputs.base import BaseOutputs
 from vellum.workflows.state.base import BaseState, StateMeta
@@ -140,3 +146,65 @@ def test_deployment_definition_release_tag_defaults_to_latest():
 
     # THEN the release_tag should default to "LATEST"
     assert deployment_config.release_tag == "LATEST"
+
+
+def test_tool_calling_node_with_user_provided_chat_history_block(vellum_adhoc_prompt_client):
+    """
+    Test that ToolCallingNode with user-provided chat history block merges user and node messages.
+    """
+
+    # GIVEN a ToolCallingNode with a user-provided chat history block
+    user_chat_history_block = VariablePromptBlock(
+        block_type="VARIABLE",
+        input_variable="chat_history",
+        state=None,
+        cache_config=None,
+    )
+
+    class TestToolCallingNode(ToolCallingNode):
+        ml_model = "gpt-4o-mini"
+        blocks = [user_chat_history_block]
+        functions = [first_function]
+        prompt_inputs = {"chat_history": [ChatMessage(role="USER", text="Hello from user")]}
+        max_prompt_iterations = 1
+
+    def generate_prompt_events(*args: Any, **kwargs: Any) -> Iterator[Any]:
+        execution_id = str(uuid4())
+        events = [
+            InitiatedExecutePromptEvent(execution_id=execution_id),
+            FulfilledExecutePromptEvent(
+                execution_id=execution_id,
+                outputs=[StringVellumValue(value="Hello! I can help you.")],
+            ),
+        ]
+        yield from events
+
+    vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.side_effect = generate_prompt_events
+
+    # AND a state
+    state = BaseState()
+
+    # WHEN the ToolCallingNode runs
+    node = TestToolCallingNode(state=state)
+    list(node.run())
+
+    # THEN the API should be called with the correct blocks
+    mock_api = vellum_adhoc_prompt_client.adhoc_execute_prompt_stream
+    assert mock_api.call_count >= 1
+
+    # AND the blocks should include the user-provided chat_history block
+    call_kwargs = mock_api.call_args.kwargs
+    blocks = call_kwargs["blocks"]
+
+    chat_history_blocks = [
+        block for block in blocks if block.block_type == "VARIABLE" and block.input_variable == "chat_history"
+    ]
+    assert len(chat_history_blocks) == 1
+
+    # AND the input_values should include the user's chat history
+    input_values = call_kwargs["input_values"]
+    chat_history_inputs = [
+        input_val for input_val in input_values if hasattr(input_val, "key") and input_val.key == "chat_history"
+    ]
+    assert len(chat_history_inputs) == 1
+    assert chat_history_inputs[0].value == [ChatMessage(role="USER", text="Hello from user")]
```
vellum/workflows/nodes/displayable/tool_calling_node/utils.py

```diff
@@ -22,6 +22,8 @@ from vellum.workflows.types.core import EntityInputsInterface, MergeBehavior, To
 from vellum.workflows.types.definition import DeploymentDefinition
 from vellum.workflows.types.generics import is_workflow_class
 
+CHAT_HISTORY_VARIABLE = "chat_history"
+
 
 class FunctionNode(BaseNode):
     """Node that executes a specific function."""
@@ -40,7 +42,10 @@ class ToolRouterNode(InlinePromptNode):
             max_iterations_message = f"Maximum number of prompt iterations `{self.max_prompt_iterations}` reached."
             raise NodeException(message=max_iterations_message, code=WorkflowErrorCode.NODE_EXECUTION)
 
-
+        # Merge user-provided chat history with node's chat history
+        user_chat_history = self.prompt_inputs.get(CHAT_HISTORY_VARIABLE, []) if self.prompt_inputs else []
+        merged_chat_history = user_chat_history + self.state.chat_history
+        self.prompt_inputs = {**self.prompt_inputs, CHAT_HISTORY_VARIABLE: merged_chat_history}  # type: ignore
         generator = super().run()
         for output in generator:
             if output.name == "results" and output.value:
@@ -102,16 +107,21 @@ def create_tool_router_node(
         # If no functions exist, create a simple Ports class with just a default port
         Ports = type("Ports", (), {"default": Port(default=True)})
 
-    # Add a chat history block to blocks
-    blocks.append(
-        VariablePromptBlock(
-            block_type="VARIABLE",
-            input_variable="chat_history",
-            state=None,
-            cache_config=None,
-        )
+    # Add a chat history block to blocks only if one doesn't already exist
+    has_chat_history_block = any(
+        block.block_type == "VARIABLE" and block.input_variable == CHAT_HISTORY_VARIABLE for block in blocks
     )
 
+    if not has_chat_history_block:
+        blocks.append(
+            VariablePromptBlock(
+                block_type="VARIABLE",
+                input_variable=CHAT_HISTORY_VARIABLE,
+                state=None,
+                cache_config=None,
+            )
+        )
+
     node = cast(
         Type[ToolRouterNode],
         type(
```
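The net effect of the `ToolRouterNode.run` change is that a caller-supplied `chat_history` prompt input is merged with the history the node accumulates in state, with user messages first, and `create_tool_router_node` now only injects its default `chat_history` block when the caller has not already provided one. A standalone sketch of the merge order, using plain strings as hypothetical stand-ins for `ChatMessage` objects and for `self.prompt_inputs` / `self.state.chat_history`:

```python
from typing import Dict, List

CHAT_HISTORY_VARIABLE = "chat_history"

# Hypothetical stand-ins for self.prompt_inputs and self.state.chat_history.
prompt_inputs: Dict[str, List[str]] = {CHAT_HISTORY_VARIABLE: ["USER: Hello from user"]}
state_chat_history: List[str] = ["ASSISTANT: result of a previous tool call"]

# User-provided history comes first, then the node's accumulated history.
user_chat_history = prompt_inputs.get(CHAT_HISTORY_VARIABLE, []) if prompt_inputs else []
merged_chat_history = user_chat_history + state_chat_history
prompt_inputs = {**prompt_inputs, CHAT_HISTORY_VARIABLE: merged_chat_history}

assert prompt_inputs[CHAT_HISTORY_VARIABLE] == [
    "USER: Hello from user",
    "ASSISTANT: result of a previous tool call",
]
```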
vellum/workflows/workflows/base.py

```diff
@@ -2,6 +2,7 @@ from datetime import datetime
 from functools import lru_cache
 import importlib
 import inspect
+import logging
 from threading import Event as ThreadingEvent
 from uuid import UUID, uuid4
 from typing import (
@@ -76,6 +77,8 @@ from vellum.workflows.types.utils import get_original_base
 from vellum.workflows.utils.uuids import uuid4_from_hash
 from vellum.workflows.workflows.event_filters import workflow_event_filter
 
+logger = logging.getLogger(__name__)
+
 
 class _BaseWorkflowMeta(type):
     def __new__(mcs, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
@@ -123,13 +126,70 @@ class _BaseWorkflowMeta(type):
                raise TypeError(f"Unexpected graph type: {graph_item.__class__}")
            return nodes
 
+        def filter_overlapping_nodes_from_unused_graphs(
+            unused_graphs: Set[GraphAttribute], overlapping_nodes: Set[Type[BaseNode]]
+        ) -> Set[GraphAttribute]:
+            filtered_graphs: Set[GraphAttribute] = set()
+
+            for item in unused_graphs:
+                if isinstance(item, Graph):
+                    graph_nodes = set(item.nodes)
+                    overlapping_in_graph = graph_nodes & overlapping_nodes
+
+                    if not overlapping_in_graph:
+                        filtered_graphs.add(item)
+                    else:
+                        non_overlapping_nodes = graph_nodes - overlapping_nodes
+                        for node in non_overlapping_nodes:
+                            filtered_graphs.add(node)
+
+                elif isinstance(item, set):
+                    filtered_nodes: Set[Type[BaseNode]] = set()
+                    filtered_graphs_in_set: Set[Graph] = set()
+
+                    for subitem in item:
+                        if isinstance(subitem, Graph):
+                            graph_nodes = set(subitem.nodes)
+                            overlapping_in_graph = graph_nodes & overlapping_nodes
+
+                            if not overlapping_in_graph:
+                                filtered_graphs_in_set.add(subitem)
+                            else:
+                                non_overlapping_nodes = graph_nodes - overlapping_nodes
+                                filtered_nodes.update(non_overlapping_nodes)
+                        elif isinstance(subitem, type) and issubclass(subitem, BaseNode):
+                            if subitem not in overlapping_nodes:
+                                filtered_nodes.add(subitem)
+                        else:
+                            raise TypeError(f"Unexpected item type in unused_graphs set: {subitem.__class__}")
+
+                    # Add non-empty sets back to filtered_graphs
+                    if filtered_nodes:
+                        filtered_graphs.add(filtered_nodes)
+                    if filtered_graphs_in_set:
+                        filtered_graphs.add(filtered_graphs_in_set)
+
+                elif isinstance(item, type) and issubclass(item, BaseNode):
+                    if item not in overlapping_nodes:
+                        filtered_graphs.add(item)
+                else:
+                    filtered_graphs.add(item)
+
+            return filtered_graphs
+
         graph_nodes = collect_nodes(dct.get("graph", set()))
         unused_nodes = collect_nodes(dct.get("unused_graphs", set()))
 
         overlap = graph_nodes & unused_nodes
         if overlap:
             node_names = [node.__name__ for node in overlap]
-
+            logger.warning(
+                f"Node(s) {', '.join(node_names)} appear in both graph and unused_graphs. Removing from unused_graphs."
+            )
+
+            # Filter out overlapping nodes from unused_graphs
+            if "unused_graphs" in dct:
+                dct["unused_graphs"] = filter_overlapping_nodes_from_unused_graphs(dct["unused_graphs"], overlap)
 
         cls = super().__new__(mcs, name, bases, dct)
         workflow_class = cast(Type["BaseWorkflow"], cls)
```
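With these changes, `_BaseWorkflowMeta.__new__` warns about nodes that appear in both `graph` and `unused_graphs` and strips them from `unused_graphs`. A rough standalone sketch of that behavior, using node names (plain strings) as hypothetical stand-ins for `BaseNode` subclasses and `Graph` objects:

```python
import logging

logger = logging.getLogger(__name__)

# Hypothetical stand-ins: node names instead of node classes and graph objects.
graph_nodes = {"NodeA", "NodeB"}
unused_nodes = {"NodeA", "NodeC"}

overlap = graph_nodes & unused_nodes
if overlap:
    node_names = sorted(overlap)
    logger.warning(
        f"Node(s) {', '.join(node_names)} appear in both graph and unused_graphs. Removing from unused_graphs."
    )
    # Keep only the unused entries that do not also appear in the main graph.
    unused_nodes = unused_nodes - overlap

assert unused_nodes == {"NodeC"}
```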
vellum/workflows/workflows/tests/test_base_workflow.py

```diff
@@ -1,4 +1,5 @@
 import pytest
+import logging
 from uuid import UUID, uuid4
 
 from vellum.workflows.edges.edge import Edge
@@ -79,7 +80,6 @@ def test_subworkflow__inherit_base_outputs():
 
     # TEST that the Outputs classes do not inherit from object
     assert object not in MainWorkflow.Outputs.__bases__
-    assert object not in SubWorkflow.Outputs.__bases__
 
     # TEST execution
     workflow = MainWorkflow()
@@ -295,7 +295,7 @@ def test_workflow__no_unused_edges():
     assert edges == set()
 
 
-def test_workflow__node_in_both_graph_and_unused():
+def test_workflow__node_in_both_graph_and_unused(caplog):
     class NodeA(BaseNode):
         pass
 
@@ -305,15 +305,28 @@ def test_workflow__node_in_both_graph_and_unused():
     class NodeC(BaseNode):
         pass
 
-    # WHEN we
-    with
+    # WHEN we create a workflow where NodeA appears in both graph and unused
+    with caplog.at_level(logging.WARNING):
 
         class TestWorkflow(BaseWorkflow[BaseInputs, BaseState]):
             graph = NodeA >> NodeB
             unused_graphs = {NodeA >> NodeC}
 
-    # THEN it should
-    assert
+    # THEN it should log a warning
+    assert len(caplog.records) == 1
+    assert caplog.records[0].levelname == "WARNING"
+    assert (
+        "Node(s) NodeA appear in both graph and unused_graphs. Removing from unused_graphs."
+        in caplog.records[0].message
+    )
+
+    # AND the workflow should be created successfully
+    assert TestWorkflow is not None
+
+    # AND NodeA should be removed from unused_graphs while NodeC remains
+    unused_nodes = set(TestWorkflow.get_unused_nodes())
+    assert NodeA not in unused_nodes
+    assert NodeC in unused_nodes
 
 
 def test_workflow__unsupported_graph_item():
```
{vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/RECORD

```diff
@@ -144,7 +144,7 @@ vellum/client/README.md,sha256=47bNYmRLSISR1ING58kXXZ88nFLPGFv0bAspBtuXG3g,4306
 vellum/client/__init__.py,sha256=tKLc-F8I8_62RSZg7J7Lvo1dUQ_or7DGsDhbMyhWfGA,120958
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=
+vellum/client/core/client_wrapper.py,sha256=ReGrzx3YGXetWtARuAHO8Rr64rj4PwB7DtupM9EEfkE,1916
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 vellum/client/core/http_client.py,sha256=cKs2w0ybDBk1wHQf-fTALm_MmvaMe3cZKcYJxqmCxkE,19539
@@ -223,7 +223,7 @@ vellum/client/types/add_openai_api_key_enum.py,sha256=GB7sLK_Ou7-Xn73sKJHUo6Gx3T
 vellum/client/types/api_node_result.py,sha256=3zAbNGNYY6EgJDzqmcIvLqq8wU-WoMEwGT1W1fjto6U,729
 vellum/client/types/api_node_result_data.py,sha256=qb0hMdyZvWnlqjfmzSf_AWyUYbBhwkXGoRXNtduSG0U,909
 vellum/client/types/api_request_parent_context.py,sha256=7137NWvk4Lyx4jKZU1GhQYBneuSe34ObCuY4h2jXhoc,1389
-vellum/client/types/api_version_enum.py,sha256=
+vellum/client/types/api_version_enum.py,sha256=PObXD86PDEuoxBpLg2pIv_xEeHbN99r2tgAKnCiu5hk,166
 vellum/client/types/array_chat_message_content.py,sha256=353TDzStNXA2dQETvnJrazCr33nlFx7hgvvPV526ECg,780
 vellum/client/types/array_chat_message_content_item.py,sha256=udxHZTZLQ1Ekg6X1D616a61crYxlQDz8QU6-CAHznrQ,625
 vellum/client/types/array_chat_message_content_item_request.py,sha256=3ULz8jtWvJIGl2SaGfw_hX9sKJxZaM4uUrZXyORlMhQ,742
@@ -742,7 +742,7 @@ vellum/client/types/workflow_deployment_release_workflow_deployment.py,sha256=ir
 vellum/client/types/workflow_deployment_release_workflow_version.py,sha256=V1Eb3goBX2lle851LkhR1tbCFa0z_O-yhMuQWCN6c-g,773
 vellum/client/types/workflow_error.py,sha256=EQajkEmLS64T0wYm0goHQl0rT7Lguurk8pLwkhjsgAI,282
 vellum/client/types/workflow_event_error.py,sha256=HIewu_kh3KNPpWegAQArvAGHCp-cBIXqlUAAc_dBZhc,687
-vellum/client/types/workflow_event_execution_read.py,sha256=
+vellum/client/types/workflow_event_execution_read.py,sha256=a4Nv1UmNlMqDPqGrZLpyy3nNRtc1UqXmzXkeApSfstY,2089
 vellum/client/types/workflow_execution_actual.py,sha256=YL5WL4O4CyaZWSrxqpE4chJ28EJlyScj5JeaLttegEg,843
 vellum/client/types/workflow_execution_actual_chat_history_request.py,sha256=L6U8tgM7SiU4qGJMZChFzj6HfHgO-YAlTXfbT7ZIaE4,1993
 vellum/client/types/workflow_execution_actual_json_request.py,sha256=5QYaPCSOwFnjH_kTrB2bTznTMFExSZdBhTkmelf1h4Q,1931
@@ -1681,9 +1681,9 @@ vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha
 vellum/workflows/nodes/displayable/tool_calling_node/__init__.py,sha256=3n0-ysmFKsr40CVxPthc0rfJgqVJeZuUEsCmYudLVRg,117
 vellum/workflows/nodes/displayable/tool_calling_node/node.py,sha256=7MqEtw-RejpJ5Uer11tIFKRtklC4DfiWVx2-tpnA1Gg,6310
 vellum/workflows/nodes/displayable/tool_calling_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=
+vellum/workflows/nodes/displayable/tool_calling_node/tests/test_node.py,sha256=3cCCIz-quneCVl3w8XnGxzyHx-nn_jGTKCbSH9jkYT0,7824
 vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py,sha256=eu6WTyENhGLg9pGp_j69rysZjf_qiQXske1YdZn9PzU,1718
-vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=
+vellum/workflows/nodes/displayable/tool_calling_node/utils.py,sha256=6_SL3yOCh798MpPSuqGkIaLFeah6B7lj1bXI7_1hZ3s,11860
 vellum/workflows/nodes/experimental/README.md,sha256=eF6DfIL8t-HbF9-mcofOMymKrraiBHDLKTlnBa51ZiE,284
 vellum/workflows/nodes/experimental/__init__.py,sha256=k7VQEyvgEdnrEZ-icXx3fiByPnyMOnMXNGGuqacyyik,91
 vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
@@ -1753,13 +1753,13 @@ vellum/workflows/utils/uuids.py,sha256=DFzPv9RCvsKhvdTEIQyfSek2A31D6S_QcmeLPbgrg
 vellum/workflows/utils/vellum_variables.py,sha256=zVzZSRWKV64fmu9MaoEVebW9r8UsXPvRPMvOuBmwI24,5307
 vellum/workflows/vellum_client.py,sha256=xkfoucodxNK5JR2-lbRqZx3xzDgExWkP6kySrpi_Ubc,1079
 vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
-vellum/workflows/workflows/base.py,sha256=
+vellum/workflows/workflows/base.py,sha256=DIQP5b4B5z3h0Bp1_IPdhpFAyBZxn_2owjFxnCEMBYA,27090
 vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnadGsrSZGa7t7LpJA,2008
 vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/workflows/tests/test_base_workflow.py,sha256=
+vellum/workflows/workflows/tests/test_base_workflow.py,sha256=ptMntHzVyy8ZuzNgeTuk7hREgKQ5UBdgq8VJFSGaW4Y,20832
 vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
+vellum_ai-0.14.87.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.14.87.dist-info/METADATA,sha256=PwSEW73LUo3jQfScYBT8yLOMUmskjprvWGDFbtHN47c,5556
+vellum_ai-0.14.87.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.14.87.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.14.87.dist-info/RECORD,,
```
{vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/LICENSE: file without changes
{vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/WHEEL: file without changes
{vellum_ai-0.14.85.dist-info → vellum_ai-0.14.87.dist-info}/entry_points.txt: file without changes