vellum-ai 0.12.11__py3-none-any.whl → 0.12.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vellum/__init__.py +3 -0
- vellum/client/core/client_wrapper.py +1 -1
- vellum/plugins/pydantic.py +12 -2
- vellum/workflows/descriptors/tests/test_utils.py +3 -0
- vellum/workflows/exceptions.py +1 -1
- vellum/workflows/nodes/bases/base.py +4 -1
- vellum/workflows/nodes/bases/base_adornment_node.py +75 -0
- vellum/workflows/nodes/bases/tests/test_base_node.py +13 -0
- vellum/workflows/nodes/core/inline_subworkflow_node/node.py +2 -0
- vellum/workflows/nodes/core/map_node/node.py +49 -45
- vellum/workflows/nodes/core/retry_node/node.py +10 -45
- vellum/workflows/nodes/core/try_node/node.py +12 -84
- vellum/workflows/nodes/utils.py +44 -1
- vellum/workflows/references/constant.py +21 -0
- vellum/workflows/runner/runner.py +11 -3
- vellum/workflows/types/cycle_map.py +34 -0
- vellum/workflows/workflows/base.py +45 -12
- {vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/METADATA +1 -1
- {vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/RECORD +24 -19
- vellum_ee/workflows/server/__init__.py +0 -0
- vellum_ee/workflows/server/virtual_file_loader.py +42 -0
- {vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/LICENSE +0 -0
- {vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/WHEEL +0 -0
- {vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/entry_points.txt +0 -0
vellum/__init__.py
CHANGED

vellum/client/core/client_wrapper.py
CHANGED
@@ -18,7 +18,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.12.11",
+            "X-Fern-SDK-Version": "0.12.14",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
vellum/plugins/pydantic.py
CHANGED
@@ -1,3 +1,4 @@
+from functools import lru_cache
 from typing import Any, Dict, Literal, Optional, Tuple, Union
 
 from pydantic.plugin import (
@@ -10,12 +11,20 @@ from pydantic.plugin import (
 )
 from pydantic_core import CoreSchema
 
-
+
+@lru_cache(maxsize=1)
+def import_base_descriptor():
+    """
+    We have to avoid importing from vellum.* in this file because it will cause a circular import.
+    """
+    from vellum.workflows.descriptors.base import BaseDescriptor
+
+    return BaseDescriptor
 
 
 # https://docs.pydantic.dev/2.8/concepts/plugins/#build-a-plugin
 class OnValidatePython(ValidatePythonHandlerProtocol):
-    tracked_descriptors: Dict[str,
+    tracked_descriptors: Dict[str, Any] = {}
 
     def on_enter(
         self,
@@ -31,6 +40,7 @@ class OnValidatePython(ValidatePythonHandlerProtocol):
             return
 
         self.tracked_descriptors = {}
+        BaseDescriptor = import_base_descriptor()
 
         for key, value in input.items():
             if isinstance(value, BaseDescriptor):
vellum/workflows/descriptors/tests/test_utils.py
CHANGED
@@ -2,6 +2,7 @@ import pytest
 
 from vellum.workflows.descriptors.utils import resolve_value
 from vellum.workflows.nodes.bases.base import BaseNode
+from vellum.workflows.references.constant import ConstantValueReference
 from vellum.workflows.state.base import BaseState
 
 
@@ -73,6 +74,7 @@ class DummyNode(BaseNode[FixtureState]):
             True,
         ),
         (FixtureState.zeta["foo"], "bar"),
+        (ConstantValueReference(1), 1),
     ],
     ids=[
         "or",
@@ -116,6 +118,7 @@ class DummyNode(BaseNode[FixtureState]):
         "is_not_blank",
         "or_and",
         "accessor",
+        "constants",
     ],
 )
 def test_resolve_value__happy_path(descriptor, expected_value):
vellum/workflows/exceptions.py
CHANGED
@@ -2,7 +2,7 @@ from vellum.workflows.errors import WorkflowError, WorkflowErrorCode
 
 
 class NodeException(Exception):
-    def __init__(self, message: str, code: WorkflowErrorCode):
+    def __init__(self, message: str, code: WorkflowErrorCode = WorkflowErrorCode.INTERNAL_ERROR):
         self.message = message
         self.code = code
         super().__init__(message)
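Note: with the default in place, node authors can raise NodeException with just a message. A minimal sketch of the relaxed signature (the functions below are illustrative, not part of the package):

from vellum.workflows.errors import WorkflowErrorCode
from vellum.workflows.exceptions import NodeException


def fail_with_explicit_code() -> None:
    # Passing a code still works exactly as before.
    raise NodeException("Subworkflow produced no outputs", code=WorkflowErrorCode.INVALID_OUTPUTS)


def fail_with_default_code() -> None:
    # New in this range: code defaults to WorkflowErrorCode.INTERNAL_ERROR.
    raise NodeException("Something unexpected happened")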
vellum/workflows/nodes/bases/base.py
CHANGED
@@ -214,6 +214,9 @@ class _BaseNodeExecutionMeta(type):
         return self_execution_class.node_class.__name__ == other_execution_class.node_class.__name__
 
 
+NodeRunResponse = Union[BaseOutputs, Iterator[BaseOutput]]
+
+
 class BaseNode(Generic[StateType], metaclass=BaseNodeMeta):
     __id__: UUID = uuid4_from_hash(__qualname__)
     state: StateType
@@ -350,7 +353,7 @@ class BaseNode(Generic[StateType], metaclass=BaseNodeMeta):
 
         self._inputs = MappingProxyType(all_inputs)
 
-    def run(self) ->
+    def run(self) -> NodeRunResponse:
         return self.Outputs()
 
     def __repr__(self) -> str:
vellum/workflows/nodes/bases/base_adornment_node.py
ADDED
@@ -0,0 +1,75 @@
+from typing import TYPE_CHECKING, Any, Dict, Generic, Optional, Tuple, Type
+
+from vellum.workflows.nodes.bases.base import BaseNode, BaseNodeMeta
+from vellum.workflows.outputs.base import BaseOutputs
+from vellum.workflows.references.output import OutputReference
+from vellum.workflows.types.generics import StateType
+
+if TYPE_CHECKING:
+    from vellum.workflows import BaseWorkflow
+
+
+class _BaseAdornmentNodeMeta(BaseNodeMeta):
+    def __new__(cls, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
+        node_class = super().__new__(cls, name, bases, dct)
+
+        subworkflow_attribute = dct.get("subworkflow")
+        if not subworkflow_attribute:
+            return node_class
+
+        if not issubclass(node_class, BaseAdornmentNode):
+            raise ValueError("BaseAdornableNodeMeta can only be used on subclasses of BaseAdornableNode")
+
+        subworkflow_outputs = getattr(subworkflow_attribute, "Outputs")
+        if not issubclass(subworkflow_outputs, BaseOutputs):
+            raise ValueError("subworkflow.Outputs must be a subclass of BaseOutputs")
+
+        outputs_class = dct.get("Outputs")
+        if not outputs_class:
+            raise ValueError("Outputs class not found in base classes")
+
+        if not issubclass(outputs_class, BaseNode.Outputs):
+            raise ValueError("Outputs class must be a subclass of BaseNode.Outputs")
+
+        for descriptor in subworkflow_outputs:
+            node_class.__annotate_outputs_class__(outputs_class, descriptor)
+
+        return node_class
+
+    def __getattribute__(cls, name: str) -> Any:
+        try:
+            return super().__getattribute__(name)
+        except AttributeError:
+            if name != "__wrapped_node__" and issubclass(cls, BaseAdornmentNode):
+                return getattr(cls.__wrapped_node__, name)
+            raise
+
+    @property
+    def _localns(cls) -> Dict[str, Any]:
+        if not hasattr(cls, "SubworkflowInputs"):
+            return super()._localns
+
+        return {
+            **super()._localns,
+            "SubworkflowInputs": getattr(cls, "SubworkflowInputs"),
+        }
+
+
+class BaseAdornmentNode(
+    BaseNode[StateType],
+    Generic[StateType],
+    metaclass=_BaseAdornmentNodeMeta,
+):
+    """
+    A base node that enables the node to be used as an adornment - meaning it can wrap another node. The
+    wrapped node is stored in the `__wrapped_node__` attribute and is redefined as a single-node subworkflow.
+    """
+
+    __wrapped_node__: Optional[Type["BaseNode"]] = None
+    subworkflow: Type["BaseWorkflow"]
+
+    @classmethod
+    def __annotate_outputs_class__(cls, outputs_class: Type[BaseOutputs], reference: OutputReference) -> None:
+        # Subclasses of BaseAdornableNode can override this method to provider their own
+        # approach to annotating the outputs class based on the `subworkflow.Outputs`
+        setattr(outputs_class, reference.name, reference)
vellum/workflows/nodes/bases/tests/test_base_node.py
CHANGED
@@ -1,6 +1,7 @@
 from uuid import UUID
 from typing import Optional
 
+from vellum.client.types.string_vellum_value_request import StringVellumValueRequest
 from vellum.core.pydantic_utilities import UniversalBaseModel
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases.base import BaseNode
@@ -135,3 +136,15 @@ def test_base_node__default_id():
 
     # THEN it should equal the hash of `test_base_node__default_id.<locals>.MyNode`
     assert my_id == UUID("8e71bea7-ce68-492f-9abe-477c788e6273")
+
+
+def test_base_node__node_resolution__descriptor_in_fern_pydantic():
+    class State(BaseState):
+        foo: str
+
+    class SomeNode(BaseNode):
+        model = StringVellumValueRequest(value=State.foo)
+
+    node = SomeNode(state=State(foo="bar"))
+
+    assert node.model.value == "bar"
vellum/workflows/nodes/core/inline_subworkflow_node/node.py
CHANGED
@@ -10,6 +10,7 @@ from vellum.workflows.state.base import BaseState
 from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.types.core import EntityInputsInterface
 from vellum.workflows.types.generics import StateType, WorkflowInputsType
+from vellum.workflows.workflows.event_filters import all_workflow_event_filter
 
 if TYPE_CHECKING:
     from vellum.workflows.workflows.base import BaseWorkflow
@@ -36,6 +37,7 @@ class InlineSubworkflowNode(BaseNode[StateType], Generic[StateType, WorkflowInpu
         )
         subworkflow_stream = subworkflow.stream(
             inputs=self._compile_subworkflow_inputs(),
+            event_filter=all_workflow_event_filter,
         )
 
         outputs: Optional[BaseOutputs] = None
vellum/workflows/nodes/core/map_node/node.py
CHANGED
@@ -9,21 +9,21 @@ from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.events.types import ParentContext
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.inputs.base import BaseInputs
-from vellum.workflows.nodes.bases import
+from vellum.workflows.nodes.bases.base_adornment_node import BaseAdornmentNode
+from vellum.workflows.nodes.utils import create_adornment
 from vellum.workflows.outputs import BaseOutputs
-from vellum.workflows.
+from vellum.workflows.references.output import OutputReference
 from vellum.workflows.state.context import WorkflowContext
-from vellum.workflows.types.generics import
+from vellum.workflows.types.generics import StateType
 from vellum.workflows.workflows.event_filters import all_workflow_event_filter
 
 if TYPE_CHECKING:
-    from vellum.workflows import BaseWorkflow
     from vellum.workflows.events.workflow import WorkflowEvent
 
 MapNodeItemType = TypeVar("MapNodeItemType")
 
 
-class MapNode(
+class MapNode(BaseAdornmentNode[StateType], Generic[StateType, MapNodeItemType]):
     """
     Used to map over a list of items and execute a Subworkflow on each iteration.
 
@@ -33,11 +33,10 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
     """
 
     items: List[MapNodeItemType]
-    subworkflow: Type["BaseWorkflow"]
     concurrency: Optional[int] = None
 
-    class Outputs(
-
+    class Outputs(BaseAdornmentNode.Outputs):
+        pass
 
     class SubworkflowInputs(BaseInputs):
         # TODO: Both type: ignore's below are believed to be incorrect and both have the following error:
@@ -54,6 +53,7 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
             mapped_items[output_descripter.name] = [None] * len(self.items)
 
         self._event_queue: Queue[Tuple[int, WorkflowEvent]] = Queue()
+        self._concurrency_queue: Queue[Thread] = Queue()
         fulfilled_iterations: List[bool] = []
         for index, item in enumerate(self.items):
             fulfilled_iterations.append(False)
@@ -66,11 +66,21 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
                     "parent_context": parent_context,
                 },
             )
-
+            if self.concurrency is None:
+                thread.start()
+            else:
+                self._concurrency_queue.put(thread)
+
+        if self.concurrency is not None:
+            concurrency_count = 0
+            while concurrency_count < self.concurrency:
+                is_empty = self._start_thread()
+                if is_empty:
+                    break
+
+                concurrency_count += 1
 
         try:
-            # We should consolidate this logic with the logic workflow runner uses
-            # https://app.shortcut.com/vellum/story/4736
             while map_node_event := self._event_queue.get():
                 index = map_node_event[0]
                 terminal_event = map_node_event[1]
@@ -86,6 +96,9 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
                     fulfilled_iterations[index] = True
                     if all(fulfilled_iterations):
                         break
+
+                    if self.concurrency is not None:
+                        self._start_thread()
                 elif terminal_event.name == "workflow.execution.paused":
                     raise NodeException(
                         code=WorkflowErrorCode.INVALID_OUTPUTS,
@@ -98,7 +111,12 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
                     )
         except Empty:
             pass
-
+
+        outputs = self.Outputs()
+        for output_name, output_list in mapped_items.items():
+            setattr(outputs, output_name, output_list)
+
+        return outputs
 
     def _context_run_subworkflow(
         self, *, item: MapNodeItemType, index: int, parent_context: Optional[ParentContext] = None
@@ -109,7 +127,10 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
 
     def _run_subworkflow(self, *, item: MapNodeItemType, index: int) -> None:
         context = WorkflowContext(vellum_client=self._context.vellum_client)
-        subworkflow = self.subworkflow(
+        subworkflow = self.subworkflow(
+            parent_state=self.state,
+            context=context,
+        )
         events = subworkflow.stream(
             inputs=self.SubworkflowInputs(index=index, item=item, all_items=self.items),
             event_filter=all_workflow_event_filter,
@@ -118,6 +139,14 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
         for event in events:
            self._event_queue.put((index, event))
 
+    def _start_thread(self) -> bool:
+        if self._concurrency_queue.empty():
+            return False
+
+        thread = self._concurrency_queue.get()
+        thread.start()
+        return True
+
     @overload
     @classmethod
     def wrap(cls, items: List[MapNodeItemType]) -> Callable[..., Type["MapNode[StateType, MapNodeItemType]"]]: ...
@@ -134,37 +163,12 @@ class MapNode(BaseNode, Generic[StateType, MapNodeItemType]):
     def wrap(
         cls, items: Union[List[MapNodeItemType], BaseDescriptor[List[MapNodeItemType]]]
     ) -> Callable[..., Type["MapNode[StateType, MapNodeItemType]"]]:
-
+        return create_adornment(cls, attributes={"items": items})
 
-
-
-
-
-
-            class Subworkflow(BaseWorkflow[MapNode.SubworkflowInputs, BaseState]):
-                graph = inner_cls
-
-                # mypy is wrong here, this works and is defined
-                class Outputs(inner_cls.Outputs):  # type: ignore[name-defined]
-                    pass
-
-            class WrappedNodeOutputs(BaseOutputs):
-                pass
-
-            WrappedNodeOutputs.__annotations__ = {
-                # TODO: We'll need to infer the type T of Subworkflow.Outputs[name] so we could do List[T] here
-                # https://app.shortcut.com/vellum/story/4119
-                descriptor.name: List
-                for descriptor in inner_cls.Outputs
-            }
-
-            class WrappedNode(MapNode[StateType, MapNodeItemType]):
-                items = _items
-                subworkflow = Subworkflow
-
-                class Outputs(WrappedNodeOutputs):
-                    pass
-
-            return WrappedNode
+    @classmethod
+    def __annotate_outputs_class__(cls, outputs_class: Type[BaseOutputs], reference: OutputReference) -> None:
+        parameter_type = reference.types[0]
+        annotation = List[parameter_type]  # type: ignore[valid-type]
 
-
+        previous_annotations = {prev: annotation for prev in outputs_class.__annotations__ if not prev.startswith("_")}
+        outputs_class.__annotations__ = {**previous_annotations, reference.name: annotation}
vellum/workflows/nodes/core/retry_node/node.py
CHANGED
@@ -1,27 +1,16 @@
-from typing import
+from typing import Callable, Generic, Optional, Type
 
 from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases import BaseNode
-from vellum.workflows.nodes.bases.
-from vellum.workflows.
+from vellum.workflows.nodes.bases.base_adornment_node import BaseAdornmentNode
+from vellum.workflows.nodes.utils import create_adornment
+from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.types.generics import StateType
 
-if TYPE_CHECKING:
-    from vellum.workflows import BaseWorkflow
 
-
-class _RetryNodeMeta(BaseNodeMeta):
-    @property
-    def _localns(cls) -> Dict[str, Any]:
-        return {
-            **super()._localns,
-            "SubworkflowInputs": getattr(cls, "SubworkflowInputs"),
-        }
-
-
-class RetryNode(BaseNode[StateType], Generic[StateType], metaclass=_RetryNodeMeta):
+class RetryNode(BaseAdornmentNode[StateType], Generic[StateType]):
     """
     Used to retry a Subworkflow a specified number of times.
 
@@ -32,7 +21,6 @@ class RetryNode(BaseNode[StateType], Generic[StateType], metaclass=_RetryNodeMet
 
     max_attempts: int
     retry_on_error_code: Optional[WorkflowErrorCode] = None
-    subworkflow: Type["BaseWorkflow[SubworkflowInputs, BaseState]"]
 
     class SubworkflowInputs(BaseInputs):
         attempt_number: int
@@ -41,9 +29,10 @@ class RetryNode(BaseNode[StateType], Generic[StateType], metaclass=_RetryNodeMet
         last_exception = Exception("max_attempts must be greater than 0")
         for index in range(self.max_attempts):
             attempt_number = index + 1
+            context = WorkflowContext(vellum_client=self._context.vellum_client)
             subworkflow = self.subworkflow(
                 parent_state=self.state,
-                context=
+                context=context,
             )
             terminal_event = subworkflow.run(
                 inputs=self.SubworkflowInputs(attempt_number=attempt_number),
@@ -78,30 +67,6 @@ Message: {terminal_event.error.message}""",
     def wrap(
         cls, max_attempts: int, retry_on_error_code: Optional[WorkflowErrorCode] = None
    ) -> Callable[..., Type["RetryNode"]]:
-
-
-
-        def decorator(inner_cls: Type[BaseNode]) -> Type["RetryNode"]:
-            # Investigate how to use dependency injection to avoid circular imports
-            # https://app.shortcut.com/vellum/story/4116
-            from vellum.workflows import BaseWorkflow
-
-            class Subworkflow(BaseWorkflow[RetryNode.SubworkflowInputs, BaseState]):
-                graph = inner_cls
-
-                # mypy is wrong here, this works and is defined
-                class Outputs(inner_cls.Outputs):  # type: ignore[name-defined]
-                    pass
-
-            class WrappedNode(RetryNode[StateType]):
-                max_attempts = _max_attempts
-                retry_on_error_code = _retry_on_error_code
-
-                subworkflow = Subworkflow
-
-                class Outputs(Subworkflow.Outputs):
-                    pass
-
-            return WrappedNode
-
-        return decorator
+        return create_adornment(
+            cls, attributes={"max_attempts": max_attempts, "retry_on_error_code": retry_on_error_code}
+        )
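Note: RetryNode.wrap() now delegates to the shared create_adornment factory, but the decorator-style usage stays the same. A hedged sketch (FlakyNode and its output are illustrative, not from the diff):

from vellum.workflows.errors.types import WorkflowErrorCode
from vellum.workflows.nodes.bases import BaseNode
from vellum.workflows.nodes.core.retry_node.node import RetryNode


@RetryNode.wrap(max_attempts=3, retry_on_error_code=WorkflowErrorCode.INTERNAL_ERROR)
class FlakyNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        value: str

    def run(self) -> Outputs:
        # ...call something unreliable here, raising NodeException on failure...
        return self.Outputs(value="ok")

The decorated class is a dynamically created RetryNode subclass whose subworkflow is a single-node workflow built around FlakyNode, with FlakyNode stored on __wrapped_node__.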
vellum/workflows/nodes/core/try_node/node.py
CHANGED
@@ -1,61 +1,18 @@
-import
-from types import ModuleType
-from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Iterator, Optional, Set, Tuple, Type, TypeVar
+from typing import Callable, Generic, Iterator, Optional, Set, Type
 
 from vellum.workflows.errors.types import WorkflowError, WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
-from vellum.workflows.nodes.bases.
-from vellum.workflows.nodes.utils import
+from vellum.workflows.nodes.bases.base_adornment_node import BaseAdornmentNode
+from vellum.workflows.nodes.utils import create_adornment
 from vellum.workflows.outputs.base import BaseOutput, BaseOutputs
+from vellum.workflows.references.output import OutputReference
 from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.types.generics import StateType
 from vellum.workflows.workflows.event_filters import all_workflow_event_filter
 
-if TYPE_CHECKING:
-    from vellum.workflows import BaseWorkflow
 
-
-_T = TypeVar("_T", bound=BaseOutputs)
-
-
-class _TryNodeMeta(BaseNodeMeta):
-    def __new__(cls, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
-        node_class = super().__new__(cls, name, bases, dct)
-
-        subworkflow_attribute = dct.get("subworkflow")
-        if not subworkflow_attribute:
-            return node_class
-
-        subworkflow_outputs = getattr(subworkflow_attribute, "Outputs")
-        if not issubclass(subworkflow_outputs, BaseOutputs):
-            raise ValueError("subworkflow.Outputs must be a subclass of BaseOutputs")
-
-        outputs_class = dct.get("Outputs")
-        if not outputs_class:
-            raise ValueError("Outputs class not found in base classes")
-
-        if not issubclass(outputs_class, BaseNode.Outputs):
-            raise ValueError("Outputs class must be a subclass of BaseNode.Outputs")
-
-        for descriptor in subworkflow_outputs:
-            if descriptor.name == "error":
-                raise ValueError("`error` is a reserved name for TryNode.Outputs")
-
-            setattr(outputs_class, descriptor.name, descriptor)
-
-        return node_class
-
-    def __getattribute__(cls, name: str) -> Any:
-        try:
-            return super().__getattribute__(name)
-        except AttributeError:
-            if name != "__wrapped_node__" and issubclass(cls, TryNode):
-                return getattr(cls.__wrapped_node__, name)
-            raise
-
-
-class TryNode(BaseNode[StateType], Generic[StateType], metaclass=_TryNodeMeta):
+class TryNode(BaseAdornmentNode[StateType], Generic[StateType]):
     """
     Used to execute a Subworkflow and handle errors.
 
@@ -63,9 +20,7 @@ class TryNode(BaseNode[StateType], Generic[StateType], metaclass=_TryNodeMeta):
    subworkflow: Type["BaseWorkflow"] - The Subworkflow to execute
    """
 
-    __wrapped_node__: Optional[Type["BaseNode"]] = None
     on_error_code: Optional[WorkflowErrorCode] = None
-    subworkflow: Type["BaseWorkflow"]
 
     class Outputs(BaseNode.Outputs):
         error: Optional[WorkflowError] = None
@@ -129,38 +84,11 @@ Message: {event.error.message}""",
 
     @classmethod
     def wrap(cls, on_error_code: Optional[WorkflowErrorCode] = None) -> Callable[..., Type["TryNode"]]:
-
-
-
-
-
-
-
-            inner_cls._is_wrapped_node = True
-
-            class Subworkflow(BaseWorkflow):
-                graph = inner_cls
-
-                # mypy is wrong here, this works and is defined
-                class Outputs(inner_cls.Outputs):  # type: ignore[name-defined]
-                    pass
-
-            dynamic_module = f"{inner_cls.__module__}.{inner_cls.__name__}.{ADORNMENT_MODULE_NAME}"
-            # This dynamic module allows calls to `type_hints` to work
-            sys.modules[dynamic_module] = ModuleType(dynamic_module)
-
-            # We use a dynamic wrapped node class to be uniquely tied to this `inner_cls` node during serialization
-            WrappedNode = type(
-                cls.__name__,
-                (TryNode,),
-                {
-                    "__wrapped_node__": inner_cls,
-                    "__module__": dynamic_module,
-                    "on_error_code": _on_error_code,
-                    "subworkflow": Subworkflow,
-                    "Ports": type("Ports", (TryNode.Ports,), {port.name: port.copy() for port in inner_cls.Ports}),
-                },
-            )
-            return WrappedNode
+        return create_adornment(cls, attributes={"on_error_code": on_error_code})
+
+    @classmethod
+    def __annotate_outputs_class__(cls, outputs_class: Type[BaseOutputs], reference: OutputReference) -> None:
+        if reference.name == "error":
+            raise ValueError("`error` is a reserved name for TryNode.Outputs")
 
-
+        setattr(outputs_class, reference.name, reference)
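Note: the same adornment machinery now backs TryNode. A hedged sketch (RiskyNode is illustrative, not from the diff):

from vellum.workflows.nodes.bases import BaseNode
from vellum.workflows.nodes.core.try_node.node import TryNode


@TryNode.wrap()
class RiskyNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        answer: str

    def run(self) -> Outputs:
        return self.Outputs(answer="42")

Because __annotate_outputs_class__ copies the subworkflow's output references onto TryNode.Outputs alongside the built-in error field, "error" stays a reserved output name for wrapped nodes; when the inner subworkflow rejects, the error output carries the WorkflowError instead of the exception propagating.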
vellum/workflows/nodes/utils.py
CHANGED
@@ -1,5 +1,7 @@
 from functools import cache
-
+import sys
+from types import ModuleType
+from typing import Any, Callable, Optional, Type, TypeVar
 
 from vellum.workflows.nodes import BaseNode
 from vellum.workflows.ports.port import Port
@@ -42,3 +44,44 @@ def has_wrapped_node(node: Type[NodeType]) -> bool:
         return False
 
     return True
+
+
+AdornableNode = TypeVar("AdornableNode", bound=BaseNode)
+
+
+def create_adornment(
+    adornable_cls: Type[AdornableNode], attributes: Optional[dict[str, Any]] = None
+) -> Callable[..., Type["AdornableNode"]]:
+    def decorator(inner_cls: Type[BaseNode]) -> Type["AdornableNode"]:
+        # Investigate how to use dependency injection to avoid circular imports
+        # https://app.shortcut.com/vellum/story/4116
+        from vellum.workflows import BaseWorkflow
+
+        inner_cls._is_wrapped_node = True
+
+        class Subworkflow(BaseWorkflow):
+            graph = inner_cls
+
+            # mypy is wrong here, this works and is defined
+            class Outputs(inner_cls.Outputs):  # type: ignore[name-defined]
+                pass
+
+        dynamic_module = f"{inner_cls.__module__}.{inner_cls.__name__}.{ADORNMENT_MODULE_NAME}"
+        # This dynamic module allows calls to `type_hints` to work
+        sys.modules[dynamic_module] = ModuleType(dynamic_module)
+
+        # We use a dynamic wrapped node class to be uniquely tied to this `inner_cls` node during serialization
+        WrappedNode = type(
+            adornable_cls.__name__,
+            (adornable_cls,),
+            {
+                "__wrapped_node__": inner_cls,
+                "__module__": dynamic_module,
+                "subworkflow": Subworkflow,
+                "Ports": type("Ports", (adornable_cls.Ports,), {port.name: port.copy() for port in inner_cls.Ports}),
+                **(attributes or {}),
+            },
        )
+        return WrappedNode
+
+    return decorator
vellum/workflows/references/constant.py
ADDED
@@ -0,0 +1,21 @@
+from typing import TYPE_CHECKING, Generic, TypeVar
+
+from vellum.workflows.descriptors.base import BaseDescriptor
+
+if TYPE_CHECKING:
+    from vellum.workflows.state.base import BaseState
+
+_T = TypeVar("_T")
+
+
+class ConstantValueReference(BaseDescriptor[_T], Generic[_T]):
+    def __init__(
+        self,
+        value: _T,
+    ) -> None:
+        self._value = value
+        types = (type(self._value),)
+        super().__init__(name=str(self._value), types=types)
+
+    def resolve(self, state: "BaseState") -> _T:
+        return self._value
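Note: ConstantValueReference is a descriptor that ignores state entirely, which is what the new test_utils case exercises. A minimal sketch (MyState is an illustrative empty BaseState subclass):

from vellum.workflows.references.constant import ConstantValueReference
from vellum.workflows.state.base import BaseState


class MyState(BaseState):
    pass


reference = ConstantValueReference(1)
assert reference.resolve(MyState()) == 1
assert reference.name == "1"      # name is str(value)
assert reference.types == (int,)  # types is (type(value),)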
vellum/workflows/runner/runner.py
CHANGED
@@ -4,7 +4,7 @@ import logging
 from queue import Empty, Queue
 from threading import Event as ThreadingEvent, Thread
 from uuid import UUID
-from typing import TYPE_CHECKING, Any, Dict, Generic, Iterable, Iterator, Optional, Sequence, Set, Type, Union
+from typing import TYPE_CHECKING, Any, Dict, Generic, Iterable, Iterator, List, Optional, Sequence, Set, Type, Union
 
 from vellum.workflows.constants import UNDEF
 from vellum.workflows.context import execution_context, get_parent_context
@@ -44,11 +44,13 @@ from vellum.workflows.events.workflow import (
 )
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.nodes.bases import BaseNode
+from vellum.workflows.nodes.bases.base import NodeRunResponse
 from vellum.workflows.outputs import BaseOutputs
 from vellum.workflows.outputs.base import BaseOutput
 from vellum.workflows.ports.port import Port
 from vellum.workflows.references import ExternalInputReference, OutputReference
 from vellum.workflows.state.base import BaseState
+from vellum.workflows.types.cycle_map import CycleMap
 from vellum.workflows.types.generics import OutputsType, StateType, WorkflowInputsType
 
 if TYPE_CHECKING:
@@ -72,6 +74,7 @@ class WorkflowRunner(Generic[StateType]):
         entrypoint_nodes: Optional[RunFromNodeArg] = None,
         external_inputs: Optional[ExternalInputsArg] = None,
         cancel_signal: Optional[ThreadingEvent] = None,
+        node_output_mocks: Optional[List[BaseOutputs]] = None,
         parent_context: Optional[ParentContext] = None,
     ):
         if state and external_inputs:
@@ -123,6 +126,7 @@ class WorkflowRunner(Generic[StateType]):
 
         self._dependencies: Dict[Type[BaseNode], Set[Type[BaseNode]]] = defaultdict(set)
         self._state_forks: Set[StateType] = {self._initial_state}
+        self._mocks_by_node_outputs_class = CycleMap(items=node_output_mocks or [], key_by=lambda mock: mock.__class__)
 
         self._active_nodes_by_execution_id: Dict[UUID, BaseNode[StateType]] = {}
         self._cancel_signal = cancel_signal
@@ -178,8 +182,12 @@ class WorkflowRunner(Generic[StateType]):
             node_definition=node.__class__,
             parent=parent_context,
         )
-
-
+        node_run_response: NodeRunResponse
+        if node.Outputs not in self._mocks_by_node_outputs_class:
+            with execution_context(parent_context=updated_parent_context):
+                node_run_response = node.run()
+        else:
+            node_run_response = self._mocks_by_node_outputs_class[node.Outputs]
         ports = node.Ports()
         if not isinstance(node_run_response, (BaseOutputs, Iterator)):
             raise NodeException(
vellum/workflows/types/cycle_map.py
ADDED
@@ -0,0 +1,34 @@
+from typing import Callable, Dict, Generic, List, TypeVar
+
+_K = TypeVar("_K")
+_T = TypeVar("_T")
+
+
+class CycleMap(Generic[_K, _T]):
+    """
+    A map that cycles through a list of items for each key.
+    """
+
+    def __init__(self, items: List[_T], key_by: Callable[[_T], _K]):
+        self._items: Dict[_K, List[_T]] = {}
+        for item in items:
+            self._add_item(key_by(item), item)
+
+    def _add_item(self, key: _K, item: _T):
+        if key not in self._items:
+            self._items[key] = []
+        self._items[key].append(item)
+
+    def _get_item(self, key: _K) -> _T:
+        item = self._items[key].pop(0)
+        self._items[key].append(item)
+        return item
+
+    def __getitem__(self, key: _K) -> _T:
+        return self._get_item(key)
+
+    def __setitem__(self, key: _K, value: _T):
+        self._add_item(key, value)
+
+    def __contains__(self, key: _K) -> bool:
+        return key in self._items
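Note: CycleMap is what the runner uses to hand out node output mocks; each lookup rotates the list registered under a key. A minimal sketch:

from vellum.workflows.types.cycle_map import CycleMap

# Both items share the same key (str), so repeated lookups cycle through them.
cycle_map: CycleMap[type, str] = CycleMap(items=["first", "second"], key_by=lambda item: str)

assert str in cycle_map
assert cycle_map[str] == "first"
assert cycle_map[str] == "second"
assert cycle_map[str] == "first"  # wraps back around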
vellum/workflows/workflows/base.py
CHANGED
@@ -1,16 +1,7 @@
-# flake8: noqa: E402
-
-import importlib
-import inspect
-
-from vellum.plugins.utils import load_runtime_plugins
-from vellum.workflows.utils.uuids import uuid4_from_hash
-from vellum.workflows.workflows.event_filters import workflow_event_filter
-
-load_runtime_plugins()
-
 from datetime import datetime
 from functools import lru_cache
+import importlib
+import inspect
 from threading import Event as ThreadingEvent
 from uuid import UUID, uuid4
 from typing import (
@@ -79,6 +70,8 @@ from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.state.store import Store
 from vellum.workflows.types.generics import StateType, WorkflowInputsType
 from vellum.workflows.types.utils import get_original_base
+from vellum.workflows.utils.uuids import uuid4_from_hash
+from vellum.workflows.workflows.event_filters import workflow_event_filter
 
 
 class _BaseWorkflowMeta(type):
@@ -194,6 +187,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
         entrypoint_nodes: Optional[RunFromNodeArg] = None,
         external_inputs: Optional[ExternalInputsArg] = None,
         cancel_signal: Optional[ThreadingEvent] = None,
+        node_output_mocks: Optional[List[BaseOutputs]] = None,
     ) -> TerminalWorkflowEvent:
         """
         Invoke a Workflow, returning the last event emitted, which should be one of:
@@ -218,6 +212,9 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
 
         cancel_signal: Optional[ThreadingEvent] = None
             A threading event that can be used to cancel the Workflow Execution.
+
+        node_output_mocks: Optional[List[Outputs]] = None
+            A list of Outputs to mock for Nodes during Workflow Execution.
         """
 
         events = WorkflowRunner(
@@ -227,6 +224,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
             entrypoint_nodes=entrypoint_nodes,
             external_inputs=external_inputs,
             cancel_signal=cancel_signal,
+            node_output_mocks=node_output_mocks,
             parent_context=self._context.parent_context,
         ).stream()
         first_event: Optional[Union[WorkflowExecutionInitiatedEvent, WorkflowExecutionResumedEvent]] = None
@@ -290,6 +288,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
         entrypoint_nodes: Optional[RunFromNodeArg] = None,
         external_inputs: Optional[ExternalInputsArg] = None,
         cancel_signal: Optional[ThreadingEvent] = None,
+        node_output_mocks: Optional[List[BaseOutputs]] = None,
     ) -> WorkflowEventStream:
         """
         Invoke a Workflow, yielding events as they are emitted.
@@ -315,6 +314,9 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
 
         cancel_signal: Optional[ThreadingEvent] = None
             A threading event that can be used to cancel the Workflow Execution.
+
+        node_output_mocks: Optional[List[Outputs]] = None
+            A list of Outputs to mock for Nodes during Workflow Execution.
         """
 
         should_yield = event_filter or workflow_event_filter
@@ -325,6 +327,7 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
             entrypoint_nodes=entrypoint_nodes,
             external_inputs=external_inputs,
             cancel_signal=cancel_signal,
+            node_output_mocks=node_output_mocks,
             parent_context=self.context.parent_context,
        ).stream():
            if should_yield(self.__class__, event):
@@ -416,7 +419,6 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
     def load_from_module(module_path: str) -> Type["BaseWorkflow"]:
         workflow_path = f"{module_path}.workflow"
         module = importlib.import_module(workflow_path)
-
         workflows: List[Type[BaseWorkflow]] = []
         for name in dir(module):
             if name.startswith("__"):
@@ -435,9 +437,40 @@ class BaseWorkflow(Generic[WorkflowInputsType, StateType], metaclass=_BaseWorkfl
             raise ValueError(f"No workflows found in {module_path}")
         elif len(workflows) > 1:
             raise ValueError(f"Multiple workflows found in {module_path}")
+        try:
+            BaseWorkflow.import_node_display(module_path)
+        except ModuleNotFoundError:
+            pass
 
         return workflows[0]
 
+    @staticmethod
+    def import_node_display(module_path):
+        # Import the nodes package
+        nodes_package = importlib.import_module(f"{module_path}.display.nodes")
+        # Use the loader to get the code
+        if hasattr(nodes_package, "__spec__") and nodes_package.__spec__ and nodes_package.__spec__.loader:
+            loader = nodes_package.__spec__.loader
+
+            # Check if the loader has a code attribute
+            if hasattr(loader, "code"):
+                code = loader.code
+
+                # Parse the code to find import statements
+                import_lines = [line.strip() for line in code.splitlines() if line.startswith("from ")]
+
+                # Import each module specified in the code
+                for line in import_lines:
+                    try:
+                        # Extract module name from the import line
+                        module_name = line.split(" ")[1]
+                        full_module_path = f"{module_path}.display.nodes{module_name}"
+                        importlib.import_module(full_module_path)
+                    except Exception:
+                        continue
+        # Also import from workflow.py
+        importlib.import_module(f"{module_path}.display.workflow")
+
 
 WorkflowExecutionInitiatedBody.model_rebuild()
 WorkflowExecutionFulfilledBody.model_rebuild()
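Note: node_output_mocks is threaded from BaseWorkflow.run()/stream() into WorkflowRunner, which matches each mock to a node by its Outputs class and skips node.run() entirely. A hedged sketch (the workflow and node below are illustrative, not from the diff):

from vellum.workflows import BaseWorkflow
from vellum.workflows.inputs.base import BaseInputs
from vellum.workflows.nodes.bases import BaseNode
from vellum.workflows.state.base import BaseState


class AnswerNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        answer: str

    def run(self) -> Outputs:
        return self.Outputs(answer="real")


class MyWorkflow(BaseWorkflow[BaseInputs, BaseState]):
    graph = AnswerNode


# AnswerNode.run() is never invoked; repeated executions of the same node cycle
# through the provided mocks (see CycleMap above).
terminal_event = MyWorkflow().run(
    node_output_mocks=[AnswerNode.Outputs(answer="mocked")],
)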
{vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/RECORD
CHANGED
@@ -72,12 +72,14 @@ vellum_ee/workflows/display/workflows/__init__.py,sha256=kapXsC67VJcgSuiBMa86Fde
 vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=ydAbFMzcY2LURINZbXYm9BAXZdIa3-7rQ86Kupo7qcA,12804
 vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py,sha256=AMxNnTm2z3LIR5rqxoCAfuy37F2FTuSRDVtKUoezO8M,1184
 vellum_ee/workflows/display/workflows/vellum_workflow_display.py,sha256=GhIviEMDWNw1p8z20ta08T5PeNCVJs5p2hrOX1uyNxg,17066
-
+vellum_ee/workflows/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+vellum_ee/workflows/server/virtual_file_loader.py,sha256=sQQFqn7xI0GfUlypWkVFl-X5ShXTh9bXq8N6qlQvSoE,1452
+vellum/__init__.py,sha256=eS4rbj1wrussRENYQRA1GZ5JgcbIlqDad_ARuOihIYE,35624
 vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
 vellum/client/__init__.py,sha256=z59nOGe27vMDqsU-ljfULBwC5J4nyrqFunhmo8xnxbU,111521
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=
+vellum/client/core/client_wrapper.py,sha256=eCp0imbOxsKZE981B9ikNz1dw1PL356Io3515uO93DY,1869
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
 vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -656,7 +658,7 @@ vellum/evaluations/utils/env.py,sha256=Xj_nxsoU5ox06EOTjRopR4lrigQI6Le6qbWGltYoE
 vellum/evaluations/utils/exceptions.py,sha256=dXMAkzqbHV_AP5FjjbegPlfUE0zQDlpA3qOsoOJUxfg,49
 vellum/evaluations/utils/paginator.py,sha256=rEED_BJAXAM6tM1yMwHePNzszjq_tTq4NbQvi1jWQ_Q,697
 vellum/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/plugins/pydantic.py,sha256=
+vellum/plugins/pydantic.py,sha256=dNtZWHo-IdseG52C2RoTanxyTJg0AhPZrH-9lbNqwYg,2604
 vellum/plugins/utils.py,sha256=U9ZY9KdE3RRvbcG01hXxu9CvfJD6Fo7nJDgcHjQn0FI,606
 vellum/plugins/vellum_mypy.py,sha256=VC15EzjTsXOb9uF1bky4rcxePP-0epMVmCsLB2z4Dh8,24816
 vellum/prompts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1218,7 +1220,7 @@ vellum/workflows/constants.py,sha256=Z0W4YlqfSlSgWC11PrVUPs6ZOBeIaQ78E_90J1hohiw
 vellum/workflows/context.py,sha256=R8qdsFbD_0p7B6PWnyvSrZ_aOgMtGw-_uk0P0UAmwLA,1230
 vellum/workflows/descriptors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/descriptors/base.py,sha256=BhYd5O9_3fjS_Vet9Q2_kyUJCySHGVM_HWaOBtctkNA,14320
-vellum/workflows/descriptors/tests/test_utils.py,sha256=
+vellum/workflows/descriptors/tests/test_utils.py,sha256=1CFx6Yejrg_AlwSO6kqGjuav4-ZubCLN0u4NtR3KgCk,4510
 vellum/workflows/descriptors/utils.py,sha256=lO_dbr5g3PXpHPtVBkdguAK4-1qayZ7RXjl3BgAhrMM,3795
 vellum/workflows/edges/__init__.py,sha256=wSkmAnz9xyi4vZwtDbKxwlplt2skD7n3NsxkvR_pUus,50
 vellum/workflows/edges/edge.py,sha256=N0SnY3gKVuxImPAdCbPMPlHJIXbkQ3fwq_LbJRvVMFc,677
@@ -1234,7 +1236,7 @@ vellum/workflows/events/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
 vellum/workflows/events/tests/test_event.py,sha256=izB6Y9U5ROgmHBBpLNUY2navK4-qFp6hdJqJNz6Beek,13350
 vellum/workflows/events/types.py,sha256=cjRE8WL8tYCFradd9NOGl_H0mN3LiWWnA1uHmyT2Q0Q,3412
 vellum/workflows/events/workflow.py,sha256=l5tXes0sg7iWaA1ZUE5dtAqNnGQ8iy6trVbOU9meu7U,5240
-vellum/workflows/exceptions.py,sha256=
+vellum/workflows/exceptions.py,sha256=l-FLGvXywxg6ivolCts71b8pcsYAWoB1cmUR4Jx7N8g,614
 vellum/workflows/expressions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/expressions/accessor.py,sha256=OFvAHAVABr-k7GceIhtzIurV4OuV_yHft7JPRsq87Es,1472
 vellum/workflows/expressions/and_.py,sha256=I7lNqrUM3-m_5hmjjiMhaHhJtKcLj39kEFVWPDOqwfo,916
@@ -1273,29 +1275,30 @@ vellum/workflows/inputs/base.py,sha256=1kMgr0WqCYdWUqgFvgSoAMw2067FAlgwhGXLgbIOr
 vellum/workflows/logging.py,sha256=_a217XogktV4Ncz6xKFz7WfYmZAzkfVRVuC0rWob8ls,437
 vellum/workflows/nodes/__init__.py,sha256=aVdQVv7Y3Ro3JlqXGpxwaU2zrI06plDHD2aumH5WUIs,1157
 vellum/workflows/nodes/bases/__init__.py,sha256=cniHuz_RXdJ4TQgD8CBzoiKDiPxg62ErdVpCbWICX64,58
-vellum/workflows/nodes/bases/base.py,sha256=
+vellum/workflows/nodes/bases/base.py,sha256=RREFzYPxemKUvQc0NfnwQmby-p_BE3O-TbVWKbQFdfs,14271
+vellum/workflows/nodes/bases/base_adornment_node.py,sha256=eFTgsPCYb3eyGS0-kw7C6crFnwFx437R5wh9-8bWYts,2905
 vellum/workflows/nodes/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/bases/tests/test_base_node.py,sha256=
+vellum/workflows/nodes/bases/tests/test_base_node.py,sha256=51CueFVty9XYASC0rKr1cXWejho5WElmhfhp6cCONy0,3811
 vellum/workflows/nodes/core/__init__.py,sha256=5zDMCmyt1v0HTJzlUBwq3U9L825yZGZhT9JL18-mRR4,455
 vellum/workflows/nodes/core/error_node/__init__.py,sha256=g7RRnlHhqu4qByfLjBwCunmgGA8dI5gNsjS3h6TwlSI,60
 vellum/workflows/nodes/core/error_node/node.py,sha256=MFHU5vITYSK-L9CuMZ49In2ZeNLWnhZD0f8r5dWvb5Y,1270
 vellum/workflows/nodes/core/inline_subworkflow_node/__init__.py,sha256=nKNEH1QTl-1PcvmYoqSWEl0-t6gAur8GLTXHzklRQfM,84
-vellum/workflows/nodes/core/inline_subworkflow_node/node.py,sha256=
+vellum/workflows/nodes/core/inline_subworkflow_node/node.py,sha256=zyKvpeNoD2D7vtGbNuvqXUxVgtkmSbk1XQExwX9KUf8,3602
 vellum/workflows/nodes/core/inline_subworkflow_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/core/inline_subworkflow_node/tests/test_node.py,sha256=99LLPgHMqohGL-G4KqOSjeVlDR_FH8Rv1gtRii85MC4,1143
 vellum/workflows/nodes/core/map_node/__init__.py,sha256=MXpZYmGfhsMJHqqlpd64WiJRtbAtAMQz-_3fCU_cLV0,56
-vellum/workflows/nodes/core/map_node/node.py,sha256=
+vellum/workflows/nodes/core/map_node/node.py,sha256=DTMoGqtR8MyfZ8jy8apNoN-4KFFFHywo87pfpqyBVEw,7322
 vellum/workflows/nodes/core/map_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/core/map_node/tests/test_node.py,sha256=RHSZs7t6mW3UWvRrXnHZqaXVdRT2ZquOK_YHJ-gzXsU,1871
 vellum/workflows/nodes/core/retry_node/__init__.py,sha256=lN2bIy5a3Uzhs_FYCrooADyYU6ZGShtvLKFWpelwPvo,60
-vellum/workflows/nodes/core/retry_node/node.py,sha256=
+vellum/workflows/nodes/core/retry_node/node.py,sha256=lAABgo2E_pWkzOYUBGzC1SnywgwtGuJojwT602fKCUc,3153
 vellum/workflows/nodes/core/retry_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/core/retry_node/tests/test_node.py,sha256=QXTnHwmJHISxXjvZMeuuEo0iVugVMJyaJoggI8yKXfI,3132
 vellum/workflows/nodes/core/templating_node/__init__.py,sha256=GmyuYo81_A1_Bz6id69ozVFS6FKiuDsZTiA3I6MaL2U,70
 vellum/workflows/nodes/core/templating_node/node.py,sha256=N-NOBd-UY91qO9orCcW4KEbhNvDQivZPA-PCxs-M0RM,4204
 vellum/workflows/nodes/core/templating_node/tests/test_templating_node.py,sha256=nW_kyJ9RAqz45_kJE_rlhOOvbV4OO3hecP-P-ydQpkw,2845
 vellum/workflows/nodes/core/try_node/__init__.py,sha256=JVD4DrldTIqFQQFrubs9KtWCCc0YCAc7Fzol5ZWIWeM,56
-vellum/workflows/nodes/core/try_node/node.py,sha256=
+vellum/workflows/nodes/core/try_node/node.py,sha256=_lTmSYCiz7lktaxpNWUCglNi8_5Sfy8Rpiov5SeKVMw,3920
 vellum/workflows/nodes/core/try_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/core/try_node/tests/test_node.py,sha256=Wc2kLl-MkffsBxl3IiFaqLd16e2Iosxhk7qBnojPvQg,4092
 vellum/workflows/nodes/displayable/__init__.py,sha256=6F_4DlSwvHuilWnIalp8iDjjDXl0Nmz4QzJV2PYe5RI,1023
@@ -1346,7 +1349,7 @@ vellum/workflows/nodes/experimental/README.md,sha256=eF6DfIL8t-HbF9-mcofOMymKrra
 vellum/workflows/nodes/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
 vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py,sha256=1EGeiaT-Zoo6pttQFKKBcdf3dmhAbjKGaErYD5FFwlc,10185
-vellum/workflows/nodes/utils.py,sha256=
+vellum/workflows/nodes/utils.py,sha256=chSsmKe_BsvMIJpzSxO5TWYlr3sAuxiwkfB5azkuN5Q,2715
 vellum/workflows/outputs/__init__.py,sha256=AyZ4pRh_ACQIGvkf0byJO46EDnSix1ZCAXfvh-ms1QE,94
 vellum/workflows/outputs/base.py,sha256=a7W6rNSDSawwGAXYjNTF2iHb9lnZu7WFSOagZIyy__k,7976
 vellum/workflows/ports/__init__.py,sha256=bZuMt-R7z5bKwpu4uPW7LlJeePOQWmCcDSXe5frUY5g,101
@@ -1354,6 +1357,7 @@ vellum/workflows/ports/node_ports.py,sha256=g4A-8iUAvEJSkaWppbvzAR8XU02R9U-qLN4r
 vellum/workflows/ports/port.py,sha256=rc3GB7dDQCUs0IbY08a92-31YzJHQgBeww13brSJ2Js,3172
 vellum/workflows/ports/utils.py,sha256=pEjVNJKw9LhD_cFN-o0MWBOW2ejno7jv26qqzjLxwS4,1662
 vellum/workflows/references/__init__.py,sha256=glHFC1VfXmcbNvH5VzFbkT03d8_D7MMcvEcsUBrzLIs,591
+vellum/workflows/references/constant.py,sha256=6yUT4q1sMj1hkI_tzzQ9AYcmeeDYFUNCqUq_W2DN0S8,540
 vellum/workflows/references/environment_variable.py,sha256=7FFtiKfc4eyVkkfUbhc666OBNDqvFlMoNQEYmGpEVVE,661
 vellum/workflows/references/execution_count.py,sha256=JILHqt8ELdc9ct-WsVCA5X-rKiP1rmJODw-XTf4kpHI,722
 vellum/workflows/references/external_input.py,sha256=XHugauKYvAmsGoFnjgJh00FcXjSMIqBvRun_CZuJD64,1662
@@ -1367,7 +1371,7 @@ vellum/workflows/references/workflow_input.py,sha256=86IuhlBz-9cGxeUzizyjdp482aj
 vellum/workflows/resolvers/__init__.py,sha256=eH6hTvZO4IciDaf_cf7aM2vs-DkBDyJPycOQevJxQnI,82
 vellum/workflows/resolvers/base.py,sha256=WHra9LRtlTuB1jmuNqkfVE2JUgB61Cyntn8f0b0WZg4,411
 vellum/workflows/runner/__init__.py,sha256=i1iG5sAhtpdsrlvwgH6B-m49JsINkiWyPWs8vyT-bqM,72
-vellum/workflows/runner/runner.py,sha256=
+vellum/workflows/runner/runner.py,sha256=wS9GulRM8f77sQ9-FlDpHjQJgoMLt0cTDRSTpucTyr0,28139
 vellum/workflows/sandbox.py,sha256=GVJzVjMuYzOBnSrboB0_6MMRZWBluAyQ2o7syeaeBd0,2235
 vellum/workflows/state/__init__.py,sha256=yUUdR-_Vl7UiixNDYQZ-GEM_kJI9dnOia75TtuNEsnE,60
 vellum/workflows/state/base.py,sha256=jpSzF1OQd3-fqi6dMGlNsQl-7JnJxCdzWIigmX8Wz-I,14425
@@ -1380,6 +1384,7 @@ vellum/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3
 vellum/workflows/tests/test_sandbox.py,sha256=JKwaluI-lODQo7Ek9sjDstjL_WTdSqUlVik6ZVTfVOA,1826
 vellum/workflows/types/__init__.py,sha256=KxUTMBGzuRCfiMqzzsykOeVvrrkaZmTTo1a7SLu8gRM,68
 vellum/workflows/types/core.py,sha256=D2NcSBwGgWj_mtXZqe3KnEQcb5qd5HzqAwnxwmlCfCw,899
+vellum/workflows/types/cycle_map.py,sha256=-ZMQsKzZBpCi0bchJrkuN_dtCFuz9uFABy7Fq2PI58E,928
 vellum/workflows/types/generics.py,sha256=ZkfoRhWs042i5IjA99v2wIhmh1u-Wieo3LzosgGWJVk,600
 vellum/workflows/types/stack.py,sha256=RDSGLkcV612ge8UuAH9TZiEGXxJt0Av2-H5rfzrTVVI,1014
 vellum/workflows/types/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1397,10 +1402,10 @@ vellum/workflows/utils/uuids.py,sha256=DFzPv9RCvsKhvdTEIQyfSek2A31D6S_QcmeLPbgrg
 vellum/workflows/utils/vellum_variables.py,sha256=g5xHYB8etfHE32ek19nP6Anf8NyjhmUtOwO2KmQ5xZU,3111
 vellum/workflows/vellum_client.py,sha256=ODrq_TSl-drX2aezXegf7pizpWDVJuTXH-j6528t75s,683
 vellum/workflows/workflows/__init__.py,sha256=KY45TqvavCCvXIkyCFMEc0dc6jTMOUci93U2DUrlZYc,66
-vellum/workflows/workflows/base.py,sha256=
+vellum/workflows/workflows/base.py,sha256=qdZYQq-jjdr0fYT0FCfmFuI5ypE3pANupgYcOqqML0o,18884
 vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnadGsrSZGa7t7LpJA,2008
-vellum_ai-0.12.
-vellum_ai-0.12.
-vellum_ai-0.12.
-vellum_ai-0.12.
-vellum_ai-0.12.
+vellum_ai-0.12.14.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.12.14.dist-info/METADATA,sha256=pMVsOWid4kf15trT9SP0w9_wfPpc-E6i4dV8rsUAfSc,5161
+vellum_ai-0.12.14.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.12.14.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.12.14.dist-info/RECORD,,
vellum_ee/workflows/server/__init__.py
File without changes
vellum_ee/workflows/server/virtual_file_loader.py
ADDED
@@ -0,0 +1,42 @@
+import importlib
+
+
+class VirtualFileLoader(importlib.abc.Loader):
+    def __init__(self, code: str, is_package: bool):
+        self.code = code
+        self.is_package = is_package
+
+    def create_module(self, spec):
+        return None  # use default module creation
+
+    def exec_module(self, module):
+        if not self.is_package:
+            exec(self.code, module.__dict__)
+
+
+class VirtualFileFinder(importlib.abc.MetaPathFinder, importlib.abc.Loader):
+    def __init__(self, files: dict[str, str], namespace: str):
+        self.files = files
+        self.namespace = namespace
+
+    def find_spec(self, fullname, path, target=None):
+        # Do the namespacing on the fly to avoid having to copy the file dict
+        prefixed_name = fullname if fullname.startswith(self.namespace) else f"{self.namespace}.{fullname}"
+
+        key_name = "__init__" if fullname == self.namespace else fullname.replace(f"{self.namespace}.", "")
+
+        files_key = f"{key_name.replace('.', '/')}.py"
+        if not self.files.get(files_key):
+            files_key = f"{key_name.replace('.', '/')}/__init__.py"
+
+        file = self.files.get(files_key)
+        is_package = "__init__" in files_key
+
+        if file:
+            return importlib.machinery.ModuleSpec(
+                prefixed_name,
+                VirtualFileLoader(file, is_package),
+                origin=prefixed_name,
+                is_package=is_package,
+            )
+        return None
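Note: the virtual file loader lets workflow modules be imported from source that exists only in memory. A hedged sketch of wiring it up (the file contents and namespace below are made-up examples, not Vellum APIs):

import importlib
import sys

from vellum_ee.workflows.server.virtual_file_loader import VirtualFileFinder

files = {
    "__init__.py": "# package marker",
    "workflow.py": "GREETING = 'hello'",
}

# Register the finder so `my_namespace.workflow` resolves to the in-memory source above.
sys.meta_path.append(VirtualFileFinder(files, "my_namespace"))

workflow_module = importlib.import_module("my_namespace.workflow")
print(workflow_module.GREETING)  # -> hello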
{vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/LICENSE
File without changes

{vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/WHEEL
File without changes

{vellum_ai-0.12.11.dist-info → vellum_ai-0.12.14.dist-info}/entry_points.txt
File without changes