vellum-ai 0.14.27__py3-none-any.whl → 0.14.29__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in a supported public registry. It is provided for informational purposes only.
- vellum/client/core/client_wrapper.py +1 -1
- vellum/client/core/jsonable_encoder.py +1 -1
- vellum/client/resources/documents/client.py +0 -14
- vellum/workflows/context.py +7 -2
- vellum/workflows/events/tests/test_event.py +29 -1
- vellum/workflows/events/types.py +62 -3
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +4 -2
- vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py +4 -2
- vellum/workflows/nodes/displayable/code_execution_node/utils.py +3 -2
- vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py +16 -3
- vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py +11 -3
- vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py +2 -1
- vellum/workflows/references/lazy.py +5 -5
- vellum/workflows/state/store.py +13 -0
- vellum/workflows/workflows/base.py +3 -2
- {vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/METADATA +1 -1
- {vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/RECORD +24 -24
- vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +2 -1
- vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py +80 -1
- vellum_ee/workflows/server/virtual_file_loader.py +16 -4
- vellum_ee/workflows/tests/test_server.py +36 -11
- {vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/LICENSE +0 -0
- {vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/WHEEL +0 -0
- {vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py
CHANGED
@@ -18,7 +18,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.14.27",
+            "X-Fern-SDK-Version": "0.14.29",
         }
         headers["X_API_KEY"] = self.api_key
         return headers

vellum/client/core/jsonable_encoder.py
CHANGED
@@ -45,7 +45,7 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any]
         encoder = getattr(obj.__config__, "json_encoders", {})  # type: ignore # Pydantic v1
         if custom_encoder:
             encoder.update(custom_encoder)
-        obj_dict = obj.
+        obj_dict = obj.dict(by_alias=True)
         if "__root__" in obj_dict:
             obj_dict = obj_dict["__root__"]
         if "root" in obj_dict:

vellum/client/resources/documents/client.py
CHANGED
@@ -277,13 +277,6 @@ class DocumentsClient:

        **Note:** Uses a base url of `https://documents.vellum.ai`.

-        This is a multipart/form-data request. The `contents` field should be a file upload. It also expects a JSON body with the following fields:
-        - `add_to_index_names: list[str]` - Optionally include the names of all indexes that you'd like this document to be included in
-        - `external_id: str | None` - Optionally include an external ID for this document. This is useful if you want to re-upload the same document later when its contents change and would like it to be re-indexed.
-        - `label: str` - A human-friendly name for this document. Typically the filename.
-        - `keywords: list[str] | None` - Optionally include a list of keywords that'll be associated with this document. Used when performing keyword searches.
-        - `metadata: dict[str, Any]` - A stringified JSON object containing any metadata associated with the document that you'd like to filter upon later.
-
        Parameters
        ----------
        label : str
@@ -673,13 +666,6 @@ class AsyncDocumentsClient:

        **Note:** Uses a base url of `https://documents.vellum.ai`.

-        This is a multipart/form-data request. The `contents` field should be a file upload. It also expects a JSON body with the following fields:
-        - `add_to_index_names: list[str]` - Optionally include the names of all indexes that you'd like this document to be included in
-        - `external_id: str | None` - Optionally include an external ID for this document. This is useful if you want to re-upload the same document later when its contents change and would like it to be re-indexed.
-        - `label: str` - A human-friendly name for this document. Typically the filename.
-        - `keywords: list[str] | None` - Optionally include a list of keywords that'll be associated with this document. Used when performing keyword searches.
-        - `metadata: dict[str, Any]` - A stringified JSON object containing any metadata associated with the document that you'd like to filter upon later.
-
        Parameters
        ----------
        label : str

vellum/workflows/context.py
CHANGED
@@ -1,4 +1,5 @@
 from contextlib import contextmanager
+from dataclasses import field
 import threading
 from uuid import UUID
 from typing import Iterator, Optional, cast
@@ -8,8 +9,8 @@ from vellum.workflows.events.types import ParentContext


 class ExecutionContext(UniversalBaseModel):
+    trace_id: UUID = field(default_factory=lambda: UUID("00000000-0000-0000-0000-000000000000"))
     parent_context: Optional[ParentContext] = None
-    trace_id: Optional[UUID] = None


 _CONTEXT_KEY = "_execution_context"
@@ -37,7 +38,11 @@ def execution_context(
 ) -> Iterator[None]:
     """Context manager for handling execution context."""
     prev_context = get_execution_context()
-    set_trace_id =
+    set_trace_id = (
+        prev_context.trace_id
+        if int(prev_context.trace_id)
+        else trace_id or UUID("00000000-0000-0000-0000-000000000000")
+    )
     set_parent_context = parent_context or prev_context.parent_context
     set_context = ExecutionContext(parent_context=set_parent_context, trace_id=set_trace_id)
     try:

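The net effect of this change is that `trace_id` is always a `UUID` (defaulting to the nil UUID) rather than `Optional[UUID]`, and nested `execution_context` blocks keep an existing non-nil trace id. A minimal sketch of that intended behavior — not part of the diff, and it assumes `execution_context` and `get_execution_context` are importable from `vellum.workflows.context` with the parameter names used above:

```python
from uuid import UUID, uuid4

from vellum.workflows.context import ExecutionContext, execution_context, get_execution_context

# trace_id now defaults to the nil UUID instead of None.
assert ExecutionContext().trace_id == UUID(int=0)

outer_trace_id = uuid4()
with execution_context(trace_id=outer_trace_id):
    # An inner context keeps the already-set (non-nil) trace id and ignores the new one.
    with execution_context(trace_id=uuid4()):
        assert get_execution_context().trace_id == outer_trace_id
```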
vellum/workflows/events/tests/test_event.py
CHANGED
@@ -4,6 +4,7 @@ from uuid import UUID

 from deepdiff import DeepDiff

+from vellum.client.core.pydantic_utilities import UniversalBaseModel
 from vellum.workflows.constants import undefined
 from vellum.workflows.errors.types import WorkflowError, WorkflowErrorCode
 from vellum.workflows.events.node import (
@@ -14,7 +15,7 @@ from vellum.workflows.events.node import (
     NodeExecutionStreamingBody,
     NodeExecutionStreamingEvent,
 )
-from vellum.workflows.events.types import NodeParentContext, WorkflowParentContext
+from vellum.workflows.events.types import NodeParentContext, ParentContext, WorkflowParentContext
 from vellum.workflows.events.workflow import (
     WorkflowExecutionFulfilledBody,
     WorkflowExecutionFulfilledEvent,
@@ -419,3 +420,30 @@ mock_node_uuid = str(uuid4_from_hash(MockNode.__qualname__))
 )
 def test_event_serialization(event, expected_json):
     assert not DeepDiff(event.model_dump(mode="json"), expected_json)
+
+
+def test_parent_context__deserialize_from_json__invalid_parent_context():
+    # GIVEN an event with a parent context that Vellum is introducing in the future
+    data = {
+        "foo": "bar",
+        "parent": {
+            "type": "SOME_FUTURE_ENTITY",
+            "span_id": "123e4567-e89b-12d3-a456-426614174000",
+            "some_randome_field": "some_random_value",
+            "parent": None,
+        },
+    }
+
+    # AND a dataclass that references the parent context
+    class MyData(UniversalBaseModel):
+        foo: str
+        parent: ParentContext
+
+    # WHEN the data is deserialized
+    event = MyData.model_validate(data)
+
+    # THEN the event is deserialized correctly
+    assert event.parent
+    assert event.parent.type == "UNKNOWN"
+    assert event.parent.span_id == UUID("123e4567-e89b-12d3-a456-426614174000")
+    assert event.parent.parent is None

vellum/workflows/events/types.py
CHANGED
@@ -1,9 +1,10 @@
 from datetime import datetime
 import json
 from uuid import UUID, uuid4
-from typing import Annotated, Any, Dict, List, Literal, Optional, Union
+from typing import Annotated, Any, Dict, List, Literal, Optional, Union, get_args

-from pydantic import BeforeValidator, Field
+from pydantic import BeforeValidator, Field, GetCoreSchemaHandler, Tag, ValidationInfo
+from pydantic_core import CoreSchema, core_schema

 from vellum.core.pydantic_utilities import UniversalBaseModel
 from vellum.workflows.state.encoder import DefaultStateEncoder
@@ -112,6 +113,59 @@ class APIRequestParentContext(BaseParentContext):
     type: Literal["API_REQUEST"] = "API_REQUEST"


+class UnknownParentContext(BaseParentContext):
+    type: Literal["UNKNOWN"] = "UNKNOWN"
+
+
+def _cast_parent_context_discriminator(v: Any) -> Any:
+    if v in PARENT_CONTEXT_TYPES:
+        return v
+
+    return "UNKNOWN"
+
+
+def _get_parent_context_discriminator(v: Any) -> Any:
+    if isinstance(v, dict) and "type" in v:
+        return _cast_parent_context_discriminator(v["type"])
+
+    if isinstance(v, PARENT_CONTEXT_CHOICES):
+        return v.type
+
+    return _cast_parent_context_discriminator(v)
+
+
+def _tag_parent_context_discriminator(v: Any) -> Any:
+    return Tag(_get_parent_context_discriminator(v))
+
+
+def _validate_parent_context_discriminator(v: Any, info: ValidationInfo) -> Any:
+    if isinstance(v, str):
+        return _get_parent_context_discriminator(v)
+
+    if isinstance(v, dict) and "type" in v:
+        v["type"] = _get_parent_context_discriminator(v["type"])
+
+    return v
+
+
+class ParentContextDiscriminator:
+    def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema:
+        original_schema = handler(source_type)
+        tagged_union_choices = {}
+        for index, choice in enumerate(original_schema["choices"]):
+            tagged_union_choices[Tag(PARENT_CONTEXT_TYPES[index])] = choice
+
+        tagged_union_schema = core_schema.tagged_union_schema(
+            tagged_union_choices,
+            _tag_parent_context_discriminator,
+        )
+        return core_schema.with_info_before_validator_function(
+            function=_validate_parent_context_discriminator,
+            schema=tagged_union_schema,
+            field_name="type",
+        )
+
+
 # Define the discriminated union
 ParentContext = Annotated[
     Union[
@@ -121,8 +175,13 @@ ParentContext = Annotated[
         PromptDeploymentParentContext,
         WorkflowSandboxParentContext,
         APIRequestParentContext,
+        UnknownParentContext,
     ],
-
+    ParentContextDiscriminator(),
+]
+PARENT_CONTEXT_CHOICES = get_args(get_args(ParentContext)[0])
+PARENT_CONTEXT_TYPES = [
+    pc.model_fields["type"].default for pc in PARENT_CONTEXT_CHOICES if issubclass(pc, UniversalBaseModel)
 ]

 # Update the forward references

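In practice, the custom discriminator means a `type` value the SDK does not recognize no longer fails validation; it is tagged as `UNKNOWN` and parsed into `UnknownParentContext`. A small sketch mirroring the new test added to `test_event.py`:

```python
from vellum.client.core.pydantic_utilities import UniversalBaseModel
from vellum.workflows.events.types import ParentContext


class Wrapper(UniversalBaseModel):
    parent: ParentContext


# A "type" introduced by a future SDK version falls back to UnknownParentContext.
wrapper = Wrapper.model_validate(
    {
        "parent": {
            "type": "SOME_FUTURE_ENTITY",
            "span_id": "123e4567-e89b-12d3-a456-426614174000",
            "parent": None,
        }
    }
)
assert wrapper.parent.type == "UNKNOWN"
```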
vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py
CHANGED
@@ -140,8 +140,10 @@ class BaseInlinePromptNode(BasePromptNode[StateType], Generic[StateType]):
                        value=input_value,
                    )
                )
-            elif
-
+            elif (
+                input_value
+                and isinstance(input_value, list)
+                and all(isinstance(message, (ChatMessage, ChatMessageRequest)) for message in input_value)
            ):
                chat_history = [
                    message if isinstance(message, ChatMessage) else ChatMessage.model_validate(message.model_dump())

vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py
CHANGED
@@ -91,8 +91,10 @@ class BasePromptDeploymentNode(BasePromptNode, Generic[StateType]):
                        value=input_value,
                    )
                )
-            elif
-
+            elif (
+                input_value
+                and isinstance(input_value, list)
+                and all(isinstance(message, (ChatMessage, ChatMessageRequest)) for message in input_value)
            ):
                chat_history = [
                    (

vellum/workflows/nodes/displayable/code_execution_node/utils.py
CHANGED
@@ -14,10 +14,11 @@ def read_file_from_path(node_filepath: str, script_filepath: str) -> Union[str,
     node_filepath_dir = os.path.dirname(node_filepath)
     full_filepath = os.path.join(node_filepath_dir, script_filepath)

-
+    try:
        with open(full_filepath) as file:
            return file.read()
-
+    except (FileNotFoundError, IsADirectoryError):
+        return None


 class ListWrapper(list):

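With the `try/except` in place, a script path that cannot be opened now resolves to `None` instead of raising. A hedged sketch of that behavior (the paths below are made up for illustration):

```python
from vellum.workflows.nodes.displayable.code_execution_node.utils import read_file_from_path

# Hypothetical paths; the node filepath's directory is joined with the script path,
# and a missing file (or a directory) now yields None rather than an exception.
content = read_file_from_path(
    node_filepath="/tmp/nonexistent_workflow/nodes/code_execution_node/__init__.py",
    script_filepath="./script.ts",
)
assert content is None
```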
vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py
CHANGED
@@ -9,6 +9,7 @@ from httpx import Response
 from vellum.client.core.api_error import ApiError
 from vellum.client.core.pydantic_utilities import UniversalBaseModel
 from vellum.client.types.chat_message import ChatMessage
+from vellum.client.types.chat_message_prompt_block import ChatMessagePromptBlock
 from vellum.client.types.chat_message_request import ChatMessageRequest
 from vellum.client.types.execute_prompt_event import ExecutePromptEvent
 from vellum.client.types.fulfilled_execute_prompt_event import FulfilledExecutePromptEvent
@@ -43,6 +44,7 @@ def test_inline_prompt_node__json_inputs(vellum_adhoc_prompt_client):
         "a_list": [1, 2, 3],
         "a_dataclass": MyDataClass(hello="world"),
         "a_pydantic": MyPydantic(example="example"),
+        "an_empty_list": [],
     }

     # AND a known response from invoking an inline prompt
@@ -74,8 +76,9 @@ def test_inline_prompt_node__json_inputs(vellum_adhoc_prompt_client):
         PromptRequestJsonInput(key="a_list", type="JSON", value=[1, 2, 3]),
         PromptRequestJsonInput(key="a_dataclass", type="JSON", value={"hello": "world"}),
         PromptRequestJsonInput(key="a_pydantic", type="JSON", value={"example": "example"}),
+        PromptRequestJsonInput(key="an_empty_list", type="JSON", value=[]),
     ]
-    assert len(mock_api.call_args.kwargs["input_variables"]) ==
+    assert len(mock_api.call_args.kwargs["input_variables"]) == 5


 def test_inline_prompt_node__function_definitions(vellum_adhoc_prompt_client):
@@ -241,7 +244,7 @@ def test_inline_prompt_node__parent_context(mock_httpx_transport, mock_complex_p
     # GIVEN a prompt node
     class MyNode(InlinePromptNode):
         ml_model = "gpt-4o"
-        blocks = []
+        blocks = [ChatMessagePromptBlock(chat_role="USER", blocks=[])]
         prompt_inputs = {}

     # AND a known response from the httpx client
@@ -276,6 +279,16 @@ def test_inline_prompt_node__parent_context(mock_httpx_transport, mock_complex_p

     # AND the prompt is executed with the correct execution context
     call_request_args = mock_httpx_transport.handle_request.call_args_list[0][0][0]
-
+    call_request = json.loads(call_request_args.read().decode("utf-8"))
+    request_execution_context = call_request["execution_context"]
     assert request_execution_context["trace_id"] == str(trace_id)
     assert request_execution_context["parent_context"]
+
+    # AND the blocks are serialized as expected
+    assert call_request["blocks"] == [
+        {
+            "block_type": "CHAT_MESSAGE",
+            "chat_role": "USER",
+            "blocks": [],
+        }
+    ]

vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py
CHANGED
@@ -61,14 +61,22 @@ def test_run_node__chat_history_input(vellum_client, ChatMessageClass):
     ]


-
+@pytest.mark.parametrize(
+    "input_value",
+    [
+        ["apple", "banana", "cherry"],
+        [],
+    ],
+    ids=["non_empty_array", "empty_array"],
+)
+def test_run_node__any_array_input(vellum_client, input_value):
     """Confirm that we can successfully invoke a Prompt Deployment Node that uses any array input"""

     # GIVEN a Prompt Deployment Node
     class ExamplePromptDeploymentNode(PromptDeploymentNode):
         deployment = "example_prompt_deployment"
         prompt_inputs = {
-            "fruits":
+            "fruits": input_value,
         }

     # AND we know what the Prompt Deployment will respond with
@@ -97,7 +105,7 @@ def test_run_node__any_array_input(vellum_client):
     # AND we should have invoked the Prompt Deployment with the expected inputs
     call_kwargs = vellum_client.execute_prompt_stream.call_args.kwargs
     assert call_kwargs["inputs"] == [
-        JsonInputRequest(name="fruits", value=
+        JsonInputRequest(name="fruits", value=input_value),
     ]


vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py
CHANGED
@@ -1,3 +1,4 @@
+from unittest import mock
 from uuid import uuid4
 from typing import Any, Iterator, List

@@ -75,6 +76,6 @@ def test_text_prompt_deployment_node__basic(vellum_client):
         raw_overrides=OMIT,
         release_tag="LATEST",
         request_options={
-            "additional_body_parameters": {"execution_context": {"parent_context": None, "trace_id":
+            "additional_body_parameters": {"execution_context": {"parent_context": None, "trace_id": mock.ANY}}
         },
     )

vellum/workflows/references/lazy.py
CHANGED
@@ -30,17 +30,17 @@ class LazyReference(BaseDescriptor[_T], Generic[_T]):
         from vellum.workflows.descriptors.utils import resolve_value

         if isinstance(self._get, str):
-            #
-            #
-            # passed in, similar to get_workflow_context(). Because we don't want this to slow down p1 issues
-            # that we are debugging with existing workflows, using the following workaround for now.
+            # We are comparing Output string references - when if we want to be exact,
+            # should be comparing the Output class themselves
             for output_reference, value in state.meta.node_outputs.items():
                 if str(output_reference) == self._get:
                     return value

+            child_reference = self.resolve(state.meta.parent) if state.meta.parent else None
+
             # Fix typing surrounding the return value of node outputs/output descriptors
             # https://app.shortcut.com/vellum/story/4783
-            return undefined  # type: ignore[return-value]
+            return child_reference or undefined  # type: ignore[return-value]

         return resolve_value(self._get(), state)

vellum/workflows/state/store.py
CHANGED
@@ -26,3 +26,16 @@ class Store:
     @property
     def state_snapshots(self) -> Iterator[BaseState]:
         return iter(self._state_snapshots)
+
+
+class EmptyStore(Store):
+    """
+    A store that does not record any events or state snapshots, for workflows
+    that want to opt out of the memory footprint of the traditional store entirely.
+    """
+
+    def append_event(self, event: WorkflowEvent) -> None:
+        pass
+
+    def append_state_snapshot(self, state: BaseState) -> None:
+        pass

vellum/workflows/workflows/base.py
CHANGED
@@ -165,12 +165,13 @@ class BaseWorkflow(Generic[InputsType, StateType], metaclass=_BaseWorkflowMeta):
         parent_state: Optional[BaseState] = None,
         emitters: Optional[List[BaseWorkflowEmitter]] = None,
         resolvers: Optional[List[BaseWorkflowResolver]] = None,
+        store: Optional[Store] = None,
     ):
         self._parent_state = parent_state
         self.emitters = emitters or (self.emitters if hasattr(self, "emitters") else [])
         self.resolvers = resolvers or (self.resolvers if hasattr(self, "resolvers") else [])
         self._context = context or WorkflowContext()
-        self._store = Store()
+        self._store = store or Store()
         self._execution_context = self._context.execution_context

         self.validate()
@@ -488,7 +489,7 @@ class BaseWorkflow(Generic[InputsType, StateType], metaclass=_BaseWorkflowMeta):
                 workflow_inputs=workflow_inputs or self.get_default_inputs(),
                 trace_id=execution_context.trace_id,
             )
-            if execution_context and execution_context.trace_id
+            if execution_context and int(execution_context.trace_id)
             else StateMeta(
                 parent=self._parent_state,
                 workflow_inputs=workflow_inputs or self.get_default_inputs(),

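Together with the new `EmptyStore` above, the `store` argument lets a caller run a workflow without accumulating events or state snapshots in memory. A minimal sketch using the SDK's node/workflow pattern — the node and workflow classes here are illustrative, not from the diff:

```python
from vellum.workflows import BaseWorkflow
from vellum.workflows.nodes import BaseNode
from vellum.workflows.state.store import EmptyStore


class EchoNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        value: str

    def run(self) -> Outputs:
        return self.Outputs(value="hello")


class EchoWorkflow(BaseWorkflow):
    graph = EchoNode

    class Outputs(BaseWorkflow.Outputs):
        value = EchoNode.Outputs.value


# Passing the new `store` argument swaps in a no-op store for this run.
workflow = EchoWorkflow(store=EmptyStore())
final_event = workflow.run()
assert final_event.name == "workflow.execution.fulfilled"
```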
{vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/RECORD
CHANGED
@@ -41,7 +41,7 @@ vellum_ee/workflows/display/nodes/vellum/conditional_node.py,sha256=ybLIa4uclqVI
 vellum_ee/workflows/display/nodes/vellum/error_node.py,sha256=I1Jkp2htRINJATtv1e-zs9BrReFX842djpiVgBPHDYg,2186
 vellum_ee/workflows/display/nodes/vellum/final_output_node.py,sha256=BJ--Y-LCbGFJve3OFEKHVxrw8TKvgb342opYLJibToc,3128
 vellum_ee/workflows/display/nodes/vellum/guardrail_node.py,sha256=aYZSJTxknU4LMiQdWk9LcK6CkhdozeDEMiRxfAyUNEc,2202
-vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=86hkneLIBS4Jel3GWsPVIIFqXGD3RHIpXw0iGa_Zues,8843
 vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py,sha256=MU9I8CB1X1TgL1aa1eT6DHWwNJ-2v79t74xl0oy-fBo,5510
 vellum_ee/workflows/display/nodes/vellum/map_node.py,sha256=8CPnn06HIBxBOiECevUffeVmQmCpec6WtPQnNl9gj9Y,3748
 vellum_ee/workflows/display/nodes/vellum/merge_node.py,sha256=xtyecs9mJ_WEwVpP12jxYwvehLXynhqLrPJ-Ahdk2GA,3232
@@ -54,7 +54,7 @@ vellum_ee/workflows/display/nodes/vellum/templating_node.py,sha256=5EWzdA3TSUPlb
 vellum_ee/workflows/display/nodes/vellum/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/display/nodes/vellum/tests/test_code_execution_node.py,sha256=p0fCvbKzpGvVrg67QmJd14m9E8_DG0u5s6SYIhzlkNA,4018
 vellum_ee/workflows/display/nodes/vellum/tests/test_error_node.py,sha256=ulrpoYUW-5kIxfG4Lf5F2p0k_EoYKhmahEbF3P_eruM,1648
-vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py,sha256=fu9nxD4FInSfKbipJJ7UE617VkeUWs_uS6SeEz_8-Iw,4691
 vellum_ee/workflows/display/nodes/vellum/tests/test_retry_node.py,sha256=NuIw8Yb42KUdoGi3Ur8_7VPg50IC4hNrwAkCociwqNk,2091
 vellum_ee/workflows/display/nodes/vellum/tests/test_templating_node.py,sha256=Us32jf_FQnLuT4Bs2o5JyHxihCTAN8ozZghWIR0pl9k,3459
 vellum_ee/workflows/display/nodes/vellum/tests/test_try_node.py,sha256=mtzB8LJlFCHVFM4H5AanLp29gQfaVmnN4A4iaRGJHoI,2427
@@ -100,7 +100,7 @@ vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py,sha25
 vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=nD6_lZnNp56siVJwhlWzSEHdMaSKjvWlsJa31SqfQAE,10623
 vellum_ee/workflows/display/workflows/vellum_workflow_display.py,sha256=AzGZ7ApiwXAHuymTJoXJketUegyC1dmB1blzoni5eh8,13423
 vellum_ee/workflows/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum_ee/workflows/server/virtual_file_loader.py,sha256=
+vellum_ee/workflows/server/virtual_file_loader.py,sha256=7JphJcSO3H85qiC2DpFfBWjC3JjrbRmoynBC6KKHVsA,2710
 vellum_ee/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/tests/local_files/__init__.py,sha256=UyP6kKkRqr9cTKHQF4MVLdLk5MM9GGcLuqxXsQGm22Y,51
 vellum_ee/workflows/tests/local_files/base_class.py,sha256=UuiC7J68MVr6A4949QYiBpXOLdsvFG_Cw1muEPiHT6I,298
@@ -121,18 +121,18 @@ vellum_ee/workflows/tests/local_workflow/nodes/final_output.py,sha256=ZX7zBv87zi
 vellum_ee/workflows/tests/local_workflow/nodes/templating_node.py,sha256=NQwFN61QkHfI3Vssz-B0NKGfupK8PU0FDSAIAhYBLi0,325
 vellum_ee/workflows/tests/local_workflow/workflow.py,sha256=A4qOzOPNwePYxWbcAgIPLsmrVS_aVEZEc-wULSv787Q,393
 vellum_ee/workflows/tests/test_display_meta.py,sha256=C25dErwghPNXio49pvSRxyOuc96srH6eYEwTAWdE2zY,2258
-vellum_ee/workflows/tests/test_server.py,sha256=
+vellum_ee/workflows/tests/test_server.py,sha256=Ll4o9gg0Q4r8uX6Kt8LWgIz0u2zLwPiZxl3TuqoZpxg,4707
 vellum_ee/workflows/tests/test_virtual_files.py,sha256=TJEcMR0v2S8CkloXNmCHA0QW0K6pYNGaIjraJz7sFvY,2762
 vellum/__init__.py,sha256=88-79I29hBTQvR1uH_BOCGMWuj2a4Nx82R_8KIESg28,40470
 vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
 vellum/client/__init__.py,sha256=Jv9sI5BNFo2OYA9px_aREFSIp655ryC3eaZSRI6yH1k,117826
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=
+vellum/client/core/client_wrapper.py,sha256=WRK8WU4WFDaADxcUMi93lvUkCQp0McOsjxKX1PJ4Zyc,1869
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
 vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
-vellum/client/core/jsonable_encoder.py,sha256=
+vellum/client/core/jsonable_encoder.py,sha256=qaF1gtgH-kQZb4kJskETwcCsOPUof-NnYVdszHkb-dM,3656
 vellum/client/core/pydantic_utilities.py,sha256=6ev3gtER-hjlq7PcPL9XT_YSCdgyCE8ZKHJ9Uc-gHIg,12071
 vellum/client/core/query_encoder.py,sha256=ekulqNd0j8TgD7ox-Qbz7liqX8-KP9blvT9DsRCenYM,2144
 vellum/client/core/remove_none_from_dict.py,sha256=EU9SGgYidWq7SexuJbNs4-PZ-5Bl3Vppd864mS6vQZw,342
@@ -159,7 +159,7 @@ vellum/client/resources/document_indexes/client.py,sha256=UcznU0NyvdNBpV4UCsTqG3
 vellum/client/resources/document_indexes/types/__init__.py,sha256=IoFqKHN_VBdEhC7VL8_6Jbatrn0e0zuYEJAJUahcUR0,196
 vellum/client/resources/document_indexes/types/document_indexes_list_request_status.py,sha256=sfUEB0cvOSmlE2iITqnMVyHv05Zy2fWP4QjCIYqMg0M,178
 vellum/client/resources/documents/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-vellum/client/resources/documents/client.py,sha256=
+vellum/client/resources/documents/client.py,sha256=DOiZ0i0iuGxPWTxDZsd8kndZPMB4GslHvyj1gBkrTIc,26048
 vellum/client/resources/folder_entities/__init__.py,sha256=QOp7UMMB3a32GrfVaud35ECn4fqPBKXxCyClsDgd6GE,175
 vellum/client/resources/folder_entities/client.py,sha256=xkT6D1TwPxvf1eXgDhRpKg7_O2V78jwBsIGyJgnI5SY,11110
 vellum/client/resources/folder_entities/types/__init__.py,sha256=cHabrupjC-HL3kj-UZ9WdVzqHoQHCu6QsLFB3wlOs7k,212
@@ -1430,7 +1430,7 @@ vellum/version.py,sha256=jq-1PlAYxN9AXuaZqbYk9ak27SgE2lw9Ia5gx1b1gVI,76
 vellum/workflows/README.md,sha256=hZdTKBIcsTKPofK68oPkBhyt0nnRh0csqC12k4FMHHA,3597
 vellum/workflows/__init__.py,sha256=CssPsbNvN6rDhoLuqpEv7MMKGa51vE6dvAh6U31Pcio,71
 vellum/workflows/constants.py,sha256=2yg4_uo5gpqViy3ZLSwfC8qTybleYCtOnhA4Rj6bacM,1310
-vellum/workflows/context.py,sha256=
+vellum/workflows/context.py,sha256=jvMuyeRluay8BQa7GX1TqUlmoHLCycAVYKkp87sfXSo,1644
 vellum/workflows/descriptors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/descriptors/base.py,sha256=bvF3MWsc4Xyw5Z2s1A0fbsfMCebIbPYcGvbQ9uoa_Pg,14655
 vellum/workflows/descriptors/exceptions.py,sha256=gUy4UD9JFUKSeQnQpeuDSLiRqWjWiIsxLahB7p_q3JY,54
@@ -1447,8 +1447,8 @@ vellum/workflows/errors/types.py,sha256=tVW7Il9zalnwWzdoDLqYPIvRTOhXIv6FPORZAbU7
 vellum/workflows/events/__init__.py,sha256=6pxxceJo2dcaRkWtkDAYlUQZ-PHBQSZytIoyuUK48Qw,759
 vellum/workflows/events/node.py,sha256=jbmNHjdp331Q1IRK-AWtAxwF6Lidb9R7__N5rQuilE8,5401
 vellum/workflows/events/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/events/tests/test_event.py,sha256=
-vellum/workflows/events/types.py,sha256=
+vellum/workflows/events/tests/test_event.py,sha256=WRxjOO1470rFH40O56RWjhonIdupW782h_FRAhIQZCQ,17823
+vellum/workflows/events/types.py,sha256=cKXEZEZ4C_O38CH-qiu8nYSMy2DVJ66lQayJO5A-haU,5690
 vellum/workflows/events/workflow.py,sha256=xdqU6WOexaAqzJbU2Zw42o2LJhK7SDPtTFO5REGv17I,7293
 vellum/workflows/exceptions.py,sha256=NiBiR3ggfmPxBVqD-H1SqmjI-7mIn0EStSN1BqApvCM,1213
 vellum/workflows/expressions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1533,10 +1533,10 @@ vellum/workflows/nodes/displayable/bases/base_prompt_node/__init__.py,sha256=Org
 vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py,sha256=HGNoGLJ9lbqflGdYFDIiuHFyi0iJ-agJu4kkJ7D3dGs,3212
 vellum/workflows/nodes/displayable/bases/inline_prompt_node/__init__.py,sha256=Hl35IAoepRpE-j4cALaXVJIYTYOF3qszyVbxTj4kS1s,82
 vellum/workflows/nodes/displayable/bases/inline_prompt_node/constants.py,sha256=fnjiRWLoRlC4Puo5oQcpZD5Hd-EesxsAo9l5tGAkpZQ,270
-vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=
+vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=rga24gkK9_STRhFwhBwGL7oHhTTZvLWS_rXHHrp85p4,8386
 vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py,sha256=ZCXCZs-_OyPk4nqCpuWY-vw87lg92TDZ2tK_gckJ7mg,10450
-vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py,sha256=
+vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py,sha256=mjTAN2GERUaoKVsr5h1ecW60Hw_fZ-EYzgaKMZ33P-s,5674
 vellum/workflows/nodes/displayable/bases/search_node.py,sha256=3UtbqY3QO4kzfJHbmUNZGnEEfJmaoiF892u8H6TGjp8,5381
 vellum/workflows/nodes/displayable/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/bases/tests/test_utils.py,sha256=eqdqbKNRWVMDPevgwLg1i6YK0g4L4bCy-7xCBN5yYZI,3156
@@ -1548,7 +1548,7 @@ vellum/workflows/nodes/displayable/code_execution_node/tests/__init__.py,sha256=
 vellum/workflows/nodes/displayable/code_execution_node/tests/fixtures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/code_execution_node/tests/fixtures/main.py,sha256=5QsbmkzSlSbcbWTG_JmIqcP-JNJzOPTKxGzdHos19W4,79
 vellum/workflows/nodes/displayable/code_execution_node/tests/test_code_execution_node.py,sha256=xAaoOfQHQUlp0iKlig87t0aT2cJM-8PxiTb1QDg8VmY,24641
-vellum/workflows/nodes/displayable/code_execution_node/utils.py,sha256=
+vellum/workflows/nodes/displayable/code_execution_node/utils.py,sha256=PI0IQysC3uASv4nof23O4gIWpoNl3tRleb1q417bfTw,3896
 vellum/workflows/nodes/displayable/conditional_node/__init__.py,sha256=AS_EIqFdU1F9t8aLmbZU-rLh9ry6LCJ0uj0D8F0L5Uw,72
 vellum/workflows/nodes/displayable/conditional_node/node.py,sha256=Qjfl33gZ3JEgxBA1EgzSUebboGvsARthIxxcQyvx5Gg,1152
 vellum/workflows/nodes/displayable/conftest.py,sha256=tD_WIiw5WjFqnzgnGLtEZDaMj2XhQ1DptnBTKYeBbI0,5705
@@ -1562,7 +1562,7 @@ vellum/workflows/nodes/displayable/guardrail_node/test_node.py,sha256=1yPIAt4_GW
 vellum/workflows/nodes/displayable/inline_prompt_node/__init__.py,sha256=gSUOoEZLlrx35-tQhSAd3An8WDwBqyiQh-sIebLU9wU,74
 vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=8RXZqWMzViUjFfbpmcy1gkSsKnEpci8BGwsuPYv4xMQ,3380
 vellum/workflows/nodes/displayable/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py,sha256=
+vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py,sha256=2Xg37rrzWA5-LrLjO3yQQN0hMn6gDQWyPv6Lye64ujQ,10663
 vellum/workflows/nodes/displayable/merge_node/__init__.py,sha256=J8IC08dSH7P76wKlNuxe1sn7toNGtSQdFirUbtPDEs0,60
 vellum/workflows/nodes/displayable/merge_node/node.py,sha256=nZtGGVAvY4fvGg8vwV6sTQ8_QLRnigeXt0vf2FL272A,450
 vellum/workflows/nodes/displayable/note_node/__init__.py,sha256=KWA3P4fyYJ-fOTky8qNGlcOotQ-HeHJ9AjZt6mRQmCE,58
@@ -1570,7 +1570,7 @@ vellum/workflows/nodes/displayable/note_node/node.py,sha256=sIN1VBQ7zeT3GhN0kupX
 vellum/workflows/nodes/displayable/prompt_deployment_node/__init__.py,sha256=krX1Hds-TSVYZsx0wJFX4wsAKkEFYOX1ifwRGiIM-EA,82
 vellum/workflows/nodes/displayable/prompt_deployment_node/node.py,sha256=pb-KbrnfTRL7mmNtVAMmiCiys8raXkl5Od7sIu682xU,2707
 vellum/workflows/nodes/displayable/prompt_deployment_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py,sha256=
+vellum/workflows/nodes/displayable/prompt_deployment_node/tests/test_node.py,sha256=mHSecwE8bcwduM5wNKwDTzlLeh7ECdEEuT86BDgByPY,5798
 vellum/workflows/nodes/displayable/search_node/__init__.py,sha256=hpBpvbrDYf43DElRZFLzieSn8weXiwNiiNOJurERQbs,62
 vellum/workflows/nodes/displayable/search_node/node.py,sha256=_VHHuTNN4icZBgc7O5U9SVKrv1zgKipU72fOtxTyrQU,1453
 vellum/workflows/nodes/displayable/search_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1582,7 +1582,7 @@ vellum/workflows/nodes/displayable/subworkflow_deployment_node/tests/test_node.p
 vellum/workflows/nodes/displayable/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/tests/test_inline_text_prompt_node.py,sha256=LaxohBcKfSW2PSiBBlx67FdW_q4YC2BM2ouH-vuGPAA,4700
 vellum/workflows/nodes/displayable/tests/test_search_node_wth_text_output.py,sha256=VepO5z1277c1y5N6LLIC31nnWD1aak2m5oPFplfJHHs,6935
-vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha256=
+vellum/workflows/nodes/displayable/tests/test_text_prompt_deployment_node.py,sha256=sGlAzTgKz8OsqeT3e7FbQyPpvs_2Fk9_jfD6BRyc6M0,2628
 vellum/workflows/nodes/experimental/README.md,sha256=eF6DfIL8t-HbF9-mcofOMymKrraiBHDLKTlnBa51ZiE,284
 vellum/workflows/nodes/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
@@ -1604,7 +1604,7 @@ vellum/workflows/references/environment_variable.py,sha256=-gfOcdYwVp9ztSUYz6h2W
 vellum/workflows/references/execution_count.py,sha256=JILHqt8ELdc9ct-WsVCA5X-rKiP1rmJODw-XTf4kpHI,722
 vellum/workflows/references/external_input.py,sha256=c_4SojTpykCSbGS1Pjmx9FfquyYGMPksoj0AbrWv7Go,2064
 vellum/workflows/references/input.py,sha256=3INu-TLTi4dziWmva6LO3WvgDlPzsjayUx61cVvqLJA,325
-vellum/workflows/references/lazy.py,sha256=
+vellum/workflows/references/lazy.py,sha256=jgUYmgt-yAybzPf_R-74MzdU8VuNwMYI8EQqrj9lVR0,2948
 vellum/workflows/references/node.py,sha256=LP854wDVs-9I_aZ7-nkbwXqL2H7W2_3LED2e9FixNS8,1418
 vellum/workflows/references/output.py,sha256=-g97wkZDf-6_l_-NM4C_8v1VBt2c7NCPhtrG2lBLKSc,2808
 vellum/workflows/references/state_value.py,sha256=bInUF0A3Pt4-zhA0f6LdSuyv8tz7n5QRkHAEn4gsmqI,711
@@ -1620,7 +1620,7 @@ vellum/workflows/state/__init__.py,sha256=yUUdR-_Vl7UiixNDYQZ-GEM_kJI9dnOia75Ttu
 vellum/workflows/state/base.py,sha256=Vkhneko3VlQrPsMLU1PYSzXU_W1u7_AraJsghiv5O-4,15512
 vellum/workflows/state/context.py,sha256=yePVr4CCTQn5bjo1697JOO24fKFQpVNzooL07xL4gL0,2702
 vellum/workflows/state/encoder.py,sha256=TnOQojc5lTQ83g9QbpA4UCqShJvutmTMxbpKt-9gNe4,1911
-vellum/workflows/state/store.py,sha256=
+vellum/workflows/state/store.py,sha256=uVe-oN73KwGV6M6YLhwZMMUQhzTQomsVfVnb8V91gVo,1147
 vellum/workflows/state/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/state/tests/test_state.py,sha256=jBynFR4m74Vn51DdmKBLkxb1loTy1CnJPtzPmdAFQUo,5159
 vellum/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1645,13 +1645,13 @@ vellum/workflows/utils/uuids.py,sha256=DFzPv9RCvsKhvdTEIQyfSek2A31D6S_QcmeLPbgrg
 vellum/workflows/utils/vellum_variables.py,sha256=UiGlUh0a8vel2FbW3w-xbHxSv_jNutkDdqMVtP_b42A,3385
 vellum/workflows/vellum_client.py,sha256=GxOy3dX6A04xiY69vPv1S4YGuQ_TMxwHi6WRMimQBBE,762
 vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
-vellum/workflows/workflows/base.py,sha256=
+vellum/workflows/workflows/base.py,sha256=DycL6YgN8CyaHub2gG8r9NB5PVxR9jP0wV4J09iaBnE,22749
 vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnadGsrSZGa7t7LpJA,2008
 vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/workflows/tests/test_base_workflow.py,sha256=tCxrV3QBHL8wfdEO3bvKteDdw32xBlUl1_WxkAwaONw,8344
 vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
+vellum_ai-0.14.29.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.14.29.dist-info/METADATA,sha256=KgSQqeoqVLwdeQuikF8QxV4hr9OaVVyym5OlArQRyPM,5484
+vellum_ai-0.14.29.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.14.29.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.14.29.dist-info/RECORD,,

vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py
CHANGED
@@ -94,7 +94,8 @@ class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType],
                 input_name=variable_name,
                 value=variable_value,
                 display_context=display_context,
-                input_id=self.node_input_ids_by_name.get(variable_name)
+                input_id=self.node_input_ids_by_name.get(f"{InlinePromptNode.prompt_inputs.name}.{variable_name}")
+                or self.node_input_ids_by_name.get(variable_name),
             )
             vellum_variable_type = infer_vellum_variable_type(variable_value)
             node_inputs.append(node_input)

vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py
CHANGED
@@ -1,7 +1,12 @@
+import pytest
+from uuid import UUID
+from typing import Type
+
 from vellum.workflows import BaseWorkflow
 from vellum.workflows.nodes import BaseNode
 from vellum.workflows.nodes.displayable.inline_prompt_node.node import InlinePromptNode
 from vellum.workflows.references.lazy import LazyReference
+from vellum_ee.workflows.display.nodes.vellum.inline_prompt_node import BaseInlinePromptNodeDisplay
 from vellum_ee.workflows.display.workflows.get_vellum_workflow_display_class import get_workflow_display
 from vellum_ee.workflows.display.workflows.vellum_workflow_display import VellumWorkflowDisplay

@@ -34,7 +39,7 @@ def test_serialize_node__lazy_reference_in_prompt_inputs():

     assert lazy_reference_node["inputs"] == [
         {
-            "id": "
+            "id": "aa81c1bc-d5d8-4ae8-8946-e9f4d0c1ab5f",
             "key": "attr",
             "value": {
                 "combinator": "OR",
@@ -50,3 +55,77 @@ def test_serialize_node__lazy_reference_in_prompt_inputs():
             },
         }
     ]
+
+
+def _no_display_class(Node: Type[InlinePromptNode]):
+    return None
+
+
+def _display_class_with_node_input_ids_by_name(Node: Type[InlinePromptNode]):
+    class PromptNodeDisplay(BaseInlinePromptNodeDisplay[Node]):  # type: ignore[valid-type]
+        node_input_ids_by_name = {"foo": UUID("fba6a4d5-835a-4e99-afb7-f6a4aed15110")}
+
+    return PromptNodeDisplay
+
+
+def _display_class_with_node_input_ids_by_name_with_inputs_prefix(Node: Type[InlinePromptNode]):
+    class PromptNodeDisplay(BaseInlinePromptNodeDisplay[Node]):  # type: ignore[valid-type]
+        node_input_ids_by_name = {"prompt_inputs.foo": UUID("fba6a4d5-835a-4e99-afb7-f6a4aed15110")}
+
+    return PromptNodeDisplay
+
+
+@pytest.mark.parametrize(
+    ["GetDisplayClass", "expected_input_id"],
+    [
+        (_no_display_class, "8aa4ce7f-5eb8-41b7-abd0-ea2b40c8fb88"),
+        (_display_class_with_node_input_ids_by_name, "fba6a4d5-835a-4e99-afb7-f6a4aed15110"),
+        (_display_class_with_node_input_ids_by_name_with_inputs_prefix, "fba6a4d5-835a-4e99-afb7-f6a4aed15110"),
+    ],
+    ids=[
+        "no_display_class",
+        "display_class_with_node_input_ids_by_name",
+        "display_class_with_node_input_ids_by_name_with_inputs_prefix",
+    ],
+)
+def test_serialize_node__prompt_inputs(GetDisplayClass, expected_input_id):
+    # GIVEN a prompt node with inputs
+    class MyPromptNode(InlinePromptNode):
+        prompt_inputs = {"foo": "bar"}
+        blocks = []
+        ml_model = "gpt-4o"
+
+    # AND a workflow with the prompt node
+    class Workflow(BaseWorkflow):
+        graph = MyPromptNode
+
+    # AND a display class
+    GetDisplayClass(MyPromptNode)
+
+    # WHEN the workflow is serialized
+    workflow_display = get_workflow_display(base_display_class=VellumWorkflowDisplay, workflow_class=Workflow)
+    serialized_workflow: dict = workflow_display.serialize()
+
+    # THEN the node should properly serialize the inputs
+    my_prompt_node = next(
+        node for node in serialized_workflow["workflow_raw_data"]["nodes"] if node["id"] == str(MyPromptNode.__id__)
+    )
+
+    assert my_prompt_node["inputs"] == [
+        {
+            "id": expected_input_id,
+            "key": "foo",
+            "value": {
+                "rules": [
+                    {
+                        "type": "CONSTANT_VALUE",
+                        "data": {
+                            "type": "STRING",
+                            "value": "bar",
+                        },
+                    }
+                ],
+                "combinator": "OR",
+            },
+        }
+    ]

vellum_ee/workflows/server/virtual_file_loader.py
CHANGED
@@ -1,5 +1,7 @@
 import importlib
+from importlib.machinery import ModuleSpec
 import re
+import sys
 from typing import Optional


@@ -8,11 +10,20 @@ class VirtualFileLoader(importlib.abc.Loader):
         self.files = files
         self.namespace = namespace

-    def create_module(self, spec):
-
+    def create_module(self, spec: ModuleSpec):
+        """
+        We started with cpython/Lib/importlib/_bootstrap.py::FrozenImporter::create_module here
+
+        https://github.com/python/cpython/blob/053c285f6b41f92fbdd1d4ff0c959cceefacd7cd/Lib/importlib/_bootstrap.py#L1160C1-L1169C22
+
+        and reduced our needs to just updating the __file__ attribute directly.
+        """
+        module = type(sys)(spec.name)
+        module.__file__ = spec.origin
+        return module

     def exec_module(self, module):
-        module_info = self._resolve_module(module.__spec__.
+        module_info = self._resolve_module(module.__spec__.name)

         if module_info:
             file_path, code = module_info
@@ -66,7 +77,8 @@ class VirtualFileFinder(importlib.abc.MetaPathFinder, importlib.abc.Loader):
             return importlib.machinery.ModuleSpec(
                 fullname,
                 self.loader,
-                origin=
+                origin=file_path,
                 is_package=is_package,
             )
+
         return None

vellum_ee/workflows/tests/test_server.py
CHANGED
@@ -4,11 +4,18 @@ from uuid import uuid4
 from typing import Type, cast

 from vellum.client.core.pydantic_utilities import UniversalBaseModel
+from vellum.client.types.code_executor_response import CodeExecutorResponse
+from vellum.client.types.number_vellum_value import NumberVellumValue
 from vellum.workflows import BaseWorkflow
 from vellum.workflows.nodes import BaseNode
 from vellum_ee.workflows.server.virtual_file_loader import VirtualFileFinder


+@pytest.fixture
+def mock_open(mocker):
+    return mocker.patch("vellum.workflows.nodes.displayable.code_execution_node.utils.open")
+
+
 def test_load_workflow_event_display_context():
     # DEPRECATED: Use `vellum.workflows.events.workflow.WorkflowEventDisplayContext` instead. Will be removed in 0.15.0
     from vellum_ee.workflows.display.types import WorkflowEventDisplayContext
@@ -72,9 +79,16 @@ class StartNode(BaseNode):
     assert start_node.foo.instance.name == "StartNode.Outputs.bar"


-
-
-
+def test_load_from_module__ts_code_in_file_loader(
+    mock_open,
+    vellum_client,
+):
+    # GIVEN typescript code
+    ts_code = """async function main(): any {
+    return 5;
+}"""
+
+    # AND a workflow module with only a code execution node
     files = {
         "__init__.py": "",
         "workflow.py": """\
@@ -95,18 +109,13 @@ from typing import Any
 from vellum.workflows.nodes.displayable import CodeExecutionNode as BaseCodeExecutionNode
 from vellum.workflows.state import BaseState

-class CodeExecutionNode(BaseCodeExecutionNode[BaseState,
+class CodeExecutionNode(BaseCodeExecutionNode[BaseState, int]):
     filepath = "./script.ts"
     code_inputs = {}
     runtime = "TYPESCRIPT_5_3_3"
     packages = []
 """,
-        "nodes/code_execution_node/script.ts":
-    text: string,
-}): any {
-    const matches = inputs.text.match(/\\((.+?)\\)/gs);
-    return matches;
-}""",
+        "nodes/code_execution_node/script.ts": ts_code,
     }

     namespace = str(uuid4())
@@ -114,6 +123,16 @@ class CodeExecutionNode(BaseCodeExecutionNode[BaseState, Any]):
     # AND the virtual file loader is registered
     sys.meta_path.append(VirtualFileFinder(files, namespace))

+    # AND the open function returns our file content
+    mock_open.return_value.__enter__.return_value.read.return_value = ts_code
+
+    # AND we know what the Code Execution Node will respond with
+    mock_code_execution = CodeExecutorResponse(
+        log="hello",
+        output=NumberVellumValue(value=5),
+    )
+    vellum_client.execute_code.return_value = mock_code_execution
+
     # WHEN the workflow is loaded
     Workflow = BaseWorkflow.load_from_module(namespace)
     workflow = Workflow()
@@ -122,4 +141,10 @@ class CodeExecutionNode(BaseCodeExecutionNode[BaseState, Any]):
     assert workflow

     event = workflow.run()
-    assert event.name == "workflow.execution.fulfilled"
+    assert event.name == "workflow.execution.fulfilled", event.model_dump_json()
+
+    # AND we pass in the correct file path to the open function
+    assert mock_open.call_args[0][0] == f"{namespace}/nodes/./script.ts"
+
+    # AND we invoke the Code Execution Node with the correct code
+    assert vellum_client.execute_code.call_args.kwargs["code"] == ts_code

{vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/LICENSE
File without changes
{vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/WHEEL
File without changes
{vellum_ai-0.14.27.dist-info → vellum_ai-0.14.29.dist-info}/entry_points.txt
File without changes