vellum-ai 0.14.17__py3-none-any.whl → 0.14.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vellum/client/core/client_wrapper.py +1 -1
- vellum/workflows/events/workflow.py +15 -1
- vellum/workflows/nodes/core/retry_node/node.py +59 -39
- vellum/workflows/nodes/core/templating_node/node.py +2 -2
- vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py +116 -0
- vellum/workflows/nodes/displayable/inline_prompt_node/node.py +20 -0
- vellum/workflows/runner/runner.py +16 -1
- {vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/METADATA +1 -1
- {vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/RECORD +21 -20
- vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +6 -1
- vellum_ee/workflows/display/nodes/vellum/templating_node.py +6 -7
- vellum_ee/workflows/display/nodes/vellum/tests/test_templating_node.py +97 -0
- vellum_ee/workflows/display/nodes/vellum/utils.py +1 -1
- vellum_ee/workflows/display/tests/workflow_serialization/test_basic_templating_node_serialization.py +1 -1
- vellum_ee/workflows/display/vellum.py +1 -148
- vellum_ee/workflows/display/workflows/base_workflow_display.py +1 -1
- vellum_ee/workflows/display/workflows/tests/test_workflow_display.py +13 -13
- vellum_ee/workflows/tests/test_display_meta.py +10 -10
- {vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/LICENSE +0 -0
- {vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/WHEEL +0 -0
- {vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py
CHANGED
```diff
@@ -18,7 +18,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.14.17",
+            "X-Fern-SDK-Version": "0.14.18",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
```
vellum/workflows/events/workflow.py
CHANGED
```diff
@@ -48,7 +48,7 @@ class NodeEventDisplayContext(UniversalBaseModel):


 class WorkflowEventDisplayContext(UniversalBaseModel):
-    node_displays: Dict[
+    node_displays: Dict[UUID, NodeEventDisplayContext]
     workflow_inputs: Dict[str, UUID]
     workflow_outputs: Dict[str, UUID]

@@ -194,6 +194,12 @@ WorkflowExecutionEvent = Union[
     WorkflowExecutionSnapshottedEvent,
 ]

+TerminalWorkflowExecutionEvent = Union[
+    WorkflowExecutionFulfilledEvent,
+    WorkflowExecutionRejectedEvent,
+    WorkflowExecutionPausedEvent,
+]
+

 def is_workflow_event(event: WorkflowEvent) -> TypeGuard[WorkflowExecutionEvent]:
     return (
@@ -205,3 +211,11 @@ def is_workflow_event(event: WorkflowEvent) -> TypeGuard[WorkflowExecutionEvent]
         or event.name == "workflow.execution.resumed"
         or event.name == "workflow.execution.rejected"
     )
+
+
+def is_terminal_workflow_execution_event(event: WorkflowEvent) -> TypeGuard[TerminalWorkflowExecutionEvent]:
+    return (
+        event.name == "workflow.execution.fulfilled"
+        or event.name == "workflow.execution.rejected"
+        or event.name == "workflow.execution.paused"
+    )
```
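The new `TerminalWorkflowExecutionEvent` union and its `is_terminal_workflow_execution_event` type guard give callers a single check for whether a streamed event is the workflow's final (fulfilled, rejected, or paused) event. A minimal sketch of how a consumer might use it; `MyWorkflow` is a hypothetical `BaseWorkflow` subclass with no required inputs, defined elsewhere:

```python
from vellum.workflows.events.workflow import is_terminal_workflow_execution_event

# MyWorkflow is a hypothetical BaseWorkflow subclass defined elsewhere.
workflow = MyWorkflow()

terminal_event = None
for event in workflow.stream():
    # Only workflow.execution.fulfilled / rejected / paused events pass this guard.
    if is_terminal_workflow_execution_event(event):
        terminal_event = event

if terminal_event is not None and terminal_event.name == "workflow.execution.fulfilled":
    print(terminal_event.outputs)
```

Note that when a stream also carries subworkflow events (as the RetryNode change below does via `all_workflow_event_filter`), the guard matches nested terminal events too, which is why RetryNode additionally compares `event.workflow_definition` against its own subworkflow.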
vellum/workflows/nodes/core/retry_node/node.py
CHANGED
```diff
@@ -1,9 +1,11 @@
 import time
 from typing import Callable, Generic, Optional, Type

+from vellum.workflows.context import execution_context, get_parent_context
 from vellum.workflows.descriptors.base import BaseDescriptor
 from vellum.workflows.descriptors.utils import resolve_value
 from vellum.workflows.errors.types import WorkflowErrorCode
+from vellum.workflows.events.workflow import is_terminal_workflow_execution_event
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes.bases import BaseNode
@@ -11,6 +13,7 @@ from vellum.workflows.nodes.bases.base_adornment_node import BaseAdornmentNode
 from vellum.workflows.nodes.utils import create_adornment
 from vellum.workflows.state.context import WorkflowContext
 from vellum.workflows.types.generics import StateType
+from vellum.workflows.workflows.event_filters import all_workflow_event_filter


 class RetryNode(BaseAdornmentNode[StateType], Generic[StateType]):
@@ -38,47 +41,64 @@ class RetryNode(BaseAdornmentNode[StateType], Generic[StateType]):

         for index in range(self.max_attempts):
             attempt_number = index + 1
-
-
-
-
-
-            terminal_event = subworkflow.run(
-                inputs=self.SubworkflowInputs(attempt_number=attempt_number),
-                node_output_mocks=self._context._get_all_node_output_mocks(),
-            )
-            if terminal_event.name == "workflow.execution.fulfilled":
-                node_outputs = self.Outputs()
-                workflow_output_vars = vars(terminal_event.outputs)
-
-                for output_name in workflow_output_vars:
-                    setattr(node_outputs, output_name, workflow_output_vars[output_name])
-
-                return node_outputs
-            elif terminal_event.name == "workflow.execution.paused":
-                raise NodeException(
-                    code=WorkflowErrorCode.INVALID_OUTPUTS,
-                    message=f"Subworkflow unexpectedly paused on attempt {attempt_number}",
-                )
-            elif self.retry_on_error_code and self.retry_on_error_code != terminal_event.error.code:
-                raise NodeException(
-                    code=WorkflowErrorCode.INVALID_OUTPUTS,
-                    message=f"""Unexpected rejection on attempt {attempt_number}: {terminal_event.error.code.value}.
-Message: {terminal_event.error.message}""",
+            parent_context = get_parent_context()
+            with execution_context(parent_context=parent_context):
+                subworkflow = self.subworkflow(
+                    parent_state=self.state,
+                    context=WorkflowContext(vellum_client=self._context.vellum_client),
                 )
-
-
-
-
-Message: {terminal_event.error.message}""",
+                subworkflow_stream = subworkflow.stream(
+                    inputs=self.SubworkflowInputs(attempt_number=attempt_number),
+                    event_filter=all_workflow_event_filter,
+                    node_output_mocks=self._context._get_all_node_output_mocks(),
                 )
-
-
-
-
-                )
-
-
+
+            node_outputs: Optional[BaseNode.Outputs] = None
+            exception: Optional[NodeException] = None
+            for event in subworkflow_stream:
+                self._context._emit_subworkflow_event(event)
+
+                if not is_terminal_workflow_execution_event(event):
+                    continue
+
+                if event.workflow_definition != self.subworkflow:
+                    continue
+
+                if event.name == "workflow.execution.fulfilled":
+                    node_outputs = self.Outputs()
+
+                    for output_descriptor, output_value in event.outputs:
+                        setattr(node_outputs, output_descriptor.name, output_value)
+                elif event.name == "workflow.execution.paused":
+                    exception = NodeException(
+                        code=WorkflowErrorCode.INVALID_OUTPUTS,
+                        message=f"Subworkflow unexpectedly paused on attempt {attempt_number}",
+                    )
+                elif self.retry_on_error_code and self.retry_on_error_code != event.error.code:
+                    exception = NodeException(
+                        code=WorkflowErrorCode.INVALID_OUTPUTS,
+                        message=f"""Unexpected rejection on attempt {attempt_number}: {event.error.code.value}.
+Message: {event.error.message}""",
+                    )
+                elif self.retry_on_condition and not resolve_value(self.retry_on_condition, self.state):
+                    exception = NodeException(
+                        code=WorkflowErrorCode.INVALID_OUTPUTS,
+                        message=f"""Rejection failed on attempt {attempt_number}: {event.error.code.value}.
+Message: {event.error.message}""",
+                    )
+                else:
+                    last_exception = NodeException(
+                        event.error.message,
+                        code=event.error.code,
+                    )
+                    if self.delay:
+                        time.sleep(self.delay)
+
+            if exception:
+                raise exception
+
+            if node_outputs:
+                return node_outputs

         raise last_exception

```
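The practical effect of the rewrite above is that each retry attempt now streams its subworkflow, re-emits every nested event to the parent context via `_emit_subworkflow_event`, and defers raising until the attempt's own terminal event has been seen. A rough usage sketch of the adornment itself, assuming the `RetryNode.wrap` decorator API from earlier releases; the node, error code, and decorator arguments below are illustrative, not taken from this diff:

```python
from vellum.workflows.errors.types import WorkflowErrorCode
from vellum.workflows.nodes.bases import BaseNode
from vellum.workflows.nodes.core.retry_node.node import RetryNode


# Retry up to three times, sleeping one second between attempts, but only
# when the subworkflow was rejected with a provider error.
@RetryNode.wrap(max_attempts=3, delay=1, retry_on_error_code=WorkflowErrorCode.PROVIDER_ERROR)
class CallModelNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        answer: str

    def run(self) -> Outputs:
        # ...call a flaky upstream service here...
        return self.Outputs(answer="ok")
```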
vellum/workflows/nodes/core/templating_node/node.py
CHANGED
```diff
@@ -48,10 +48,10 @@ class TemplatingNode(BaseNode[StateType], Generic[StateType, _OutputType], metac
     """

     # The Jinja template to render.
-    template: ClassVar[str]
+    template: ClassVar[str] = ""

     # The inputs to render the template with.
-    inputs: ClassVar[EntityInputsInterface]
+    inputs: ClassVar[EntityInputsInterface] = {}

     jinja_globals: Dict[str, Any] = DEFAULT_JINJA_GLOBALS
     jinja_custom_filters: Mapping[str, FilterFunc] = DEFAULT_JINJA_CUSTOM_FILTERS
```
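Giving `template` and `inputs` empty defaults means a `TemplatingNode` subclass no longer has to declare both attributes up front, which the new serialization test further below relies on (a bare node now serializes with an empty template rather than failing). A small sketch with hypothetical names:

```python
from vellum.workflows.nodes.core.templating_node import TemplatingNode


# `inputs` can be omitted entirely when the template references no variables;
# it now falls back to the new empty-dict default.
class StaticGreetingNode(TemplatingNode):
    template = "Hello from the workflow!"
```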
vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py
CHANGED
```diff
@@ -1,4 +1,6 @@
 import pytest
+import json
+from unittest import mock
 from uuid import uuid4
 from typing import Any, Iterator, List

@@ -7,6 +9,7 @@ from vellum import (
     JinjaPromptBlock,
     PlainTextPromptBlock,
     PromptBlock,
+    PromptParameters,
     RichTextPromptBlock,
     VariablePromptBlock,
 )
@@ -18,7 +21,11 @@ from vellum.client.types.prompt_request_string_input import PromptRequestStringI
 from vellum.client.types.string_vellum_value import StringVellumValue
 from vellum.workflows.errors import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
+from vellum.workflows.inputs import BaseInputs
+from vellum.workflows.nodes import InlinePromptNode
 from vellum.workflows.nodes.displayable.bases.inline_prompt_node import BaseInlinePromptNode
+from vellum.workflows.state import BaseState
+from vellum.workflows.state.base import StateMeta


 def test_validation_with_missing_variables():
@@ -180,3 +187,112 @@ def test_validation_with_extra_variables(vellum_adhoc_prompt_client):
         PromptRequestStringInput(key="required_var", type="STRING", value="value"),
         PromptRequestStringInput(key="extra_var", type="STRING", value="extra_value"),
     ]
+
+
+def test_inline_prompt_node__json_output(vellum_adhoc_prompt_client):
+    """Confirm that InlinePromptNodes output the expected JSON when run."""
+
+    # GIVEN a node that subclasses InlinePromptNode
+    class Inputs(BaseInputs):
+        input: str
+
+    class State(BaseState):
+        pass
+
+    class MyInlinePromptNode(InlinePromptNode):
+        ml_model = "gpt-4o"
+        blocks = []
+        parameters = PromptParameters(
+            stop=[],
+            temperature=0.0,
+            max_tokens=4096,
+            top_p=1.0,
+            top_k=0,
+            frequency_penalty=0.0,
+            presence_penalty=0.0,
+            logit_bias=None,
+            custom_parameters={
+                "json_mode": False,
+                "json_schema": {
+                    "name": "get_result",
+                    "schema": {
+                        "type": "object",
+                        "required": ["result"],
+                        "properties": {"result": {"type": "string", "description": ""}},
+                    },
+                },
+            },
+        )
+
+    # AND a known JSON response from invoking an inline prompt
+    expected_json = {"result": "Hello, world!"}
+    expected_outputs: List[PromptOutput] = [
+        StringVellumValue(value=json.dumps(expected_json)),
+    ]
+
+    def generate_prompt_events(*args: Any, **kwargs: Any) -> Iterator[ExecutePromptEvent]:
+        execution_id = str(uuid4())
+        events: List[ExecutePromptEvent] = [
+            InitiatedExecutePromptEvent(execution_id=execution_id),
+            FulfilledExecutePromptEvent(
+                execution_id=execution_id,
+                outputs=expected_outputs,
+            ),
+        ]
+        yield from events
+
+    vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.side_effect = generate_prompt_events
+
+    # WHEN the node is run
+    node = MyInlinePromptNode(
+        state=State(
+            meta=StateMeta(workflow_inputs=Inputs(input="Generate JSON.")),
+        )
+    )
+    outputs = [o for o in node.run()]
+
+    # THEN the node should have produced the outputs we expect
+    results_output = outputs[0]
+    assert results_output.name == "results"
+    assert results_output.value == expected_outputs
+
+    text_output = outputs[1]
+    assert text_output.name == "text"
+    assert text_output.value == '{"result": "Hello, world!"}'
+
+    json_output = outputs[2]
+    assert json_output.name == "json"
+    assert json_output.value == expected_json
+
+    # AND we should have made the expected call to Vellum search
+    vellum_adhoc_prompt_client.adhoc_execute_prompt_stream.assert_called_once_with(
+        blocks=[],
+        expand_meta=Ellipsis,
+        functions=None,
+        input_values=[],
+        input_variables=[],
+        ml_model="gpt-4o",
+        parameters=PromptParameters(
+            stop=[],
+            temperature=0.0,
+            max_tokens=4096,
+            top_p=1.0,
+            top_k=0,
+            frequency_penalty=0.0,
+            presence_penalty=0.0,
+            logit_bias=None,
+            custom_parameters={
+                "json_mode": False,
+                "json_schema": {
+                    "name": "get_result",
+                    "schema": {
+                        "type": "object",
+                        "required": ["result"],
+                        "properties": {"result": {"type": "string", "description": ""}},
+                    },
+                },
+            },
+        ),
+        request_options=mock.ANY,
+        settings=None,
+    )
```
vellum/workflows/nodes/displayable/inline_prompt_node/node.py
CHANGED
```diff
@@ -46,14 +46,31 @@ class InlinePromptNode(BaseInlinePromptNode[StateType]):
         )

         string_outputs = []
+        json_output = None
+
+        should_parse_json = False
+        if hasattr(self, "parameters"):
+            custom_params = self.parameters.custom_parameters
+            if custom_params and isinstance(custom_params, dict):
+                json_schema = custom_params.get("json_schema", {})
+                if (isinstance(json_schema, dict) and "schema" in json_schema) or custom_params.get("json_mode", {}):
+                    should_parse_json = True
+
         for output in outputs:
             if output.value is None:
                 continue

             if output.type == "STRING":
                 string_outputs.append(output.value)
+                if should_parse_json:
+                    try:
+                        parsed_json = json.loads(output.value)
+                        json_output = parsed_json
+                    except (json.JSONDecodeError, TypeError):
+                        pass
             elif output.type == "JSON":
                 string_outputs.append(json.dumps(output.value, indent=4))
+                json_output = output.value
             elif output.type == "FUNCTION_CALL":
                 string_outputs.append(output.value.model_dump_json(indent=4))
             else:
@@ -61,3 +78,6 @@ class InlinePromptNode(BaseInlinePromptNode[StateType]):

         value = "\n".join(string_outputs)
         yield BaseOutput(name="text", value=value)
+
+        if json_output:
+            yield BaseOutput(name="json", value=json_output)
```
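With this change, an `InlinePromptNode` whose `parameters.custom_parameters` carry a `json_schema` (or set `json_mode`) yields a third `json` output with the parsed payload, alongside the existing `text` and `results` outputs. A sketch of referencing it from a downstream node; the node names and template are hypothetical:

```python
from vellum.workflows.nodes import InlinePromptNode
from vellum.workflows.nodes.core.templating_node import TemplatingNode


class ExtractNode(InlinePromptNode):
    ml_model = "gpt-4o"
    blocks = []  # prompt blocks and the json_schema under parameters.custom_parameters elided


# Downstream nodes can consume the parsed payload directly instead of
# re-parsing ExtractNode.Outputs.text themselves.
class SummarizeNode(TemplatingNode):
    template = "The result was: {{ payload['result'] }}"
    inputs = {"payload": ExtractNode.Outputs.json}
```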
vellum/workflows/runner/runner.py
CHANGED
```diff
@@ -43,7 +43,7 @@ from vellum.workflows.events.workflow import (
     WorkflowExecutionSnapshottedEvent,
     WorkflowExecutionStreamingBody,
 )
-from vellum.workflows.exceptions import NodeException
+from vellum.workflows.exceptions import NodeException, WorkflowInitializationException
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.bases.base import NodeRunResponse
 from vellum.workflows.nodes.mocks import MockNodeExecutionArg
@@ -332,6 +332,18 @@ class WorkflowRunner(Generic[StateType]):
                     parent=parent_context,
                 )
            )
+        except WorkflowInitializationException as e:
+            self._workflow_event_inner_queue.put(
+                NodeExecutionRejectedEvent(
+                    trace_id=node.state.meta.trace_id,
+                    span_id=span_id,
+                    body=NodeExecutionRejectedBody(
+                        node_definition=node.__class__,
+                        error=e.error,
+                    ),
+                    parent=parent_context,
+                )
+            )
         except Exception as e:
             logger.exception(f"An unexpected error occurred while running node {node.__class__.__name__}")

@@ -563,6 +575,9 @@ class WorkflowRunner(Generic[StateType]):
         except NodeException as e:
             self._workflow_event_outer_queue.put(self._reject_workflow_event(e.error))
             return
+        except WorkflowInitializationException as e:
+            self._workflow_event_outer_queue.put(self._reject_workflow_event(e.error))
+            return
         except Exception:
             err_message = f"An unexpected error occurred while initializing node {node_cls.__name__}"
             logger.exception(err_message)
```
{vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/RECORD
CHANGED
```diff
@@ -39,7 +39,7 @@ vellum_ee/workflows/display/nodes/vellum/conditional_node.py,sha256=ybLIa4uclqVI
 vellum_ee/workflows/display/nodes/vellum/error_node.py,sha256=I1Jkp2htRINJATtv1e-zs9BrReFX842djpiVgBPHDYg,2186
 vellum_ee/workflows/display/nodes/vellum/final_output_node.py,sha256=p-PvlnxpBQ7IKskZi2A19jKAtKnSxJ8LPbGMA83VkFk,2805
 vellum_ee/workflows/display/nodes/vellum/guardrail_node.py,sha256=aYZSJTxknU4LMiQdWk9LcK6CkhdozeDEMiRxfAyUNEc,2202
-vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=grb2iDxZ8DiNlo1nfO3KPxs8saM_JZAskOFp-C4ZEq4,8908
 vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py,sha256=MU9I8CB1X1TgL1aa1eT6DHWwNJ-2v79t74xl0oy-fBo,5510
 vellum_ee/workflows/display/nodes/vellum/map_node.py,sha256=8CPnn06HIBxBOiECevUffeVmQmCpec6WtPQnNl9gj9Y,3748
 vellum_ee/workflows/display/nodes/vellum/merge_node.py,sha256=HkNMgdQELiON42jdO-xDLmqrEKdGx1RVqrz2DXNTLS8,3239
@@ -48,15 +48,16 @@ vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py,sha256=LFjLUr
 vellum_ee/workflows/display/nodes/vellum/retry_node.py,sha256=LgokATi7sSS38Fil-XjqoR4t7AMOJ-GzXRw6p606Svo,3397
 vellum_ee/workflows/display/nodes/vellum/search_node.py,sha256=TxcAGZDl_hvJ7Y1hUi9YVEVrj9Ie0hKkASdpfRL4_cs,9227
 vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py,sha256=62baAElKoRKIoba0lLhnrXGWWx96B73VxKGxh7BaIxc,2612
-vellum_ee/workflows/display/nodes/vellum/templating_node.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/templating_node.py,sha256=5EWzdA3TSUPlbJhs4uo1KGH6eGtDXZucVhlUoc_xpl4,3242
 vellum_ee/workflows/display/nodes/vellum/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/display/nodes/vellum/tests/test_error_node.py,sha256=ulrpoYUW-5kIxfG4Lf5F2p0k_EoYKhmahEbF3P_eruM,1648
 vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py,sha256=bg9INsXiWfyK047u8TD1oEOFYrqDq8GC7Hvgz69n7BE,1988
 vellum_ee/workflows/display/nodes/vellum/tests/test_retry_node.py,sha256=NuIw8Yb42KUdoGi3Ur8_7VPg50IC4hNrwAkCociwqNk,2091
+vellum_ee/workflows/display/nodes/vellum/tests/test_templating_node.py,sha256=Us32jf_FQnLuT4Bs2o5JyHxihCTAN8ozZghWIR0pl9k,3459
 vellum_ee/workflows/display/nodes/vellum/tests/test_try_node.py,sha256=mtzB8LJlFCHVFM4H5AanLp29gQfaVmnN4A4iaRGJHoI,2427
 vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py,sha256=4YUaTeD_OWF-UaPMyOTBTu9skGC1jgSHlAYrzbH7Z04,5039
 vellum_ee/workflows/display/nodes/vellum/try_node.py,sha256=0ipBZMYm521tuwefQFgHOvTyTgiWVTkzxpQtnnmOAI0,4203
-vellum_ee/workflows/display/nodes/vellum/utils.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/utils.py,sha256=lY3YsvzawDy15-8w3Brb4oSnVpVaMe5FQtgMi3BE3Mo,4737
 vellum_ee/workflows/display/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/display/tests/test_vellum_workflow_display.py,sha256=VD-4USiRlCcdC3Qe9WfdkxwFdircai0vqvuZCbELR84,9556
 vellum_ee/workflows/display/tests/workflow_serialization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -81,7 +82,7 @@ vellum_ee/workflows/display/tests/workflow_serialization/test_basic_merge_node_s
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_prompt_deployment_serialization.py,sha256=-bGp82C5_XgcVxeM80zWL9viogxvE4DGk7-5H__wwxA,8649
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_search_node_serialization.py,sha256=elc4ak2OyI4y5P6GPnK2RwuZLcx4FL6hHlmhY3ARk6k,13008
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_subworkflow_deployment_serialization.py,sha256=BzFNl9ECeGh0krm-CUjbBQQq0g7krANsp0Sh-j5dAkc,11322
-vellum_ee/workflows/display/tests/workflow_serialization/test_basic_templating_node_serialization.py,sha256=
+vellum_ee/workflows/display/tests/workflow_serialization/test_basic_templating_node_serialization.py,sha256=qjaXGw0JTnPnQmDAFm7jkB3bVkApf-WQ0Gd6kptqdPA,7818
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_terminal_node_serialization.py,sha256=NdhE3lm7RMQ8DqkraPSq24IbOxNla9unbs4tsMWRzm4,3781
 vellum_ee/workflows/display/tests/workflow_serialization/test_basic_try_node_serialization.py,sha256=eD5686C9nWC5s6t08vbAnm9qf9t53gYQM-E1FwAa75c,3035
 vellum_ee/workflows/display/tests/workflow_serialization/test_complex_terminal_node_serialization.py,sha256=huKAOeMJ2MKmp6XtbvMJTUadqynoV40Ypoz9jsBEBEQ,7431
@@ -89,11 +90,11 @@ vellum_ee/workflows/display/types.py,sha256=ixfmcQn51Rhsm4_0hWfG0_WpzLE89ZrDZpeY
 vellum_ee/workflows/display/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/display/utils/expressions.py,sha256=9FpOslDI-RCR5m4TgAu9KCHh4aTVnh7CHR2ykyMUDw0,1151
 vellum_ee/workflows/display/utils/vellum.py,sha256=EVPQUSsZ3OIeLTEbV6LHPor37t9fnj9kJxDqP4PmTLQ,8234
-vellum_ee/workflows/display/vellum.py,sha256=
+vellum_ee/workflows/display/vellum.py,sha256=ROvXFoLmnEXmJJ_Nc0ZVz94LZf6opU4O1TWbGnAfHug,5028
 vellum_ee/workflows/display/workflows/__init__.py,sha256=kapXsC67VJcgSuiBMa86FdePG5A9kMB5Pi4Uy1O2ob4,207
-vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256
+vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=-14UlLZlmfWDqNtxhY9GxXlrlPcgzEpAIffd94QPEvU,19932
 vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py,sha256=kp0u8LN_2IwshLrhMImhpZx1hRyAcD5gXY-kDuuaGMQ,1269
-vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=
+vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=Ny9VWjCxc1_w8Z5xiQEJmNmEwtjlDlxgFCrmCWSmFmA,8511
 vellum_ee/workflows/display/workflows/vellum_workflow_display.py,sha256=mbAzCpswOek34ITeTkesbVreCXpulj4NFjIg3RcdVZ8,18243
 vellum_ee/workflows/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/server/virtual_file_loader.py,sha256=X_DdNK7MfyOjKWekk6YQpOSCT6klKcdjT6nVJcBH1sM,1481
@@ -116,7 +117,7 @@ vellum_ee/workflows/tests/local_workflow/nodes/__init__.py,sha256=1F6jxUpSKfPXPj
 vellum_ee/workflows/tests/local_workflow/nodes/final_output.py,sha256=ZX7zBv87zirg0w9VKUW3QVDSdBLDqcqAMZjCL_oWbpU,297
 vellum_ee/workflows/tests/local_workflow/nodes/templating_node.py,sha256=NQwFN61QkHfI3Vssz-B0NKGfupK8PU0FDSAIAhYBLi0,325
 vellum_ee/workflows/tests/local_workflow/workflow.py,sha256=A4qOzOPNwePYxWbcAgIPLsmrVS_aVEZEc-wULSv787Q,393
-vellum_ee/workflows/tests/test_display_meta.py,sha256=
+vellum_ee/workflows/tests/test_display_meta.py,sha256=C25dErwghPNXio49pvSRxyOuc96srH6eYEwTAWdE2zY,2258
 vellum_ee/workflows/tests/test_server.py,sha256=M6vvQ2hjIpDWtQdDM9EPbMvUrZ93niAuYnxMNJWOjPA,511
 vellum_ee/workflows/tests/test_virtual_files.py,sha256=TJEcMR0v2S8CkloXNmCHA0QW0K6pYNGaIjraJz7sFvY,2762
 vellum/__init__.py,sha256=Vh9jxpBVXB2fTCyoUMlLdGL1Ujf0zNNNvqhNu5II8KI,36466
@@ -124,7 +125,7 @@ vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
 vellum/client/__init__.py,sha256=tKtdM1_GqmGq1gpi9ydWD_T-MM7fPn8QdHh8ww19cNI,117564
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=
+vellum/client/core/client_wrapper.py,sha256=uLm2G6JmYjhKSyX55XTjjoXumxodrRRDWg6-C5GmXLI,1869
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
 vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -1319,7 +1320,7 @@ vellum/workflows/events/node.py,sha256=jbmNHjdp331Q1IRK-AWtAxwF6Lidb9R7__N5rQuil
 vellum/workflows/events/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/events/tests/test_event.py,sha256=sHcKhZPDPtzZfTmehL4NORA_StR4M6nZDcx9kz3Avo0,16866
 vellum/workflows/events/types.py,sha256=AeTJaQt_fNHDLI4nyBzo7XrW9QQybRC09AKzu3kEYEE,3575
-vellum/workflows/events/workflow.py,sha256=
+vellum/workflows/events/workflow.py,sha256=xdqU6WOexaAqzJbU2Zw42o2LJhK7SDPtTFO5REGv17I,7293
 vellum/workflows/exceptions.py,sha256=NiBiR3ggfmPxBVqD-H1SqmjI-7mIn0EStSN1BqApvCM,1213
 vellum/workflows/expressions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/expressions/accessor.py,sha256=ItZF7fMLzVTqsdAiaXb5SiDupXmX0X9xbIus1W6hRds,1870
@@ -1381,11 +1382,11 @@ vellum/workflows/nodes/core/map_node/node.py,sha256=dY27Xm11LHsqD7hnZnVYYDIazZ-X
 vellum/workflows/nodes/core/map_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/core/map_node/tests/test_node.py,sha256=uMR0AyIFn539LqTKHdwuBswnx1i-PHyqPpgtYrnmYMY,3496
 vellum/workflows/nodes/core/retry_node/__init__.py,sha256=lN2bIy5a3Uzhs_FYCrooADyYU6ZGShtvLKFWpelwPvo,60
-vellum/workflows/nodes/core/retry_node/node.py,sha256=
+vellum/workflows/nodes/core/retry_node/node.py,sha256=SoDQaizjoj6kKLr454rkXu4bdzN7hXrBBn7T2_cp1CA,5261
 vellum/workflows/nodes/core/retry_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/core/retry_node/tests/test_node.py,sha256=fNgDufkIsrTC-6ftvogqSpWhqqBj9iNESdfK19B1Yx0,5159
 vellum/workflows/nodes/core/templating_node/__init__.py,sha256=GmyuYo81_A1_Bz6id69ozVFS6FKiuDsZTiA3I6MaL2U,70
-vellum/workflows/nodes/core/templating_node/node.py,sha256
+vellum/workflows/nodes/core/templating_node/node.py,sha256=iqBmr2i-f-BqhisNQJiDfewjol0ur7-XpupLStyMJsg,3731
 vellum/workflows/nodes/core/templating_node/tests/test_templating_node.py,sha256=MHofz-BwAgt7EXkab8VIyacYznDEIJ7Er7MJUaxNQQo,9614
 vellum/workflows/nodes/core/try_node/__init__.py,sha256=JVD4DrldTIqFQQFrubs9KtWCCc0YCAc7Fzol5ZWIWeM,56
 vellum/workflows/nodes/core/try_node/node.py,sha256=RbxL0NRXS0IxRP0MJAnLABolF6dkwVniiqsagzy-lwk,4445
@@ -1405,7 +1406,7 @@ vellum/workflows/nodes/displayable/bases/inline_prompt_node/__init__.py,sha256=H
 vellum/workflows/nodes/displayable/bases/inline_prompt_node/constants.py,sha256=fnjiRWLoRlC4Puo5oQcpZD5Hd-EesxsAo9l5tGAkpZQ,270
 vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py,sha256=fTQ7KS5u7xCO_nJsWT5_LWgwGnYZg4U06slvXxHAzTU,8449
 vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py,sha256=
+vellum/workflows/nodes/displayable/bases/inline_prompt_node/tests/test_inline_prompt_node.py,sha256=ZCXCZs-_OyPk4nqCpuWY-vw87lg92TDZ2tK_gckJ7mg,10450
 vellum/workflows/nodes/displayable/bases/prompt_deployment_node.py,sha256=UIyNUUr8ii6rIx1gQL5vIPqTAww4ExxxJkFMoglc8LE,5794
 vellum/workflows/nodes/displayable/bases/search_node.py,sha256=3UtbqY3QO4kzfJHbmUNZGnEEfJmaoiF892u8H6TGjp8,5381
 vellum/workflows/nodes/displayable/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1428,7 +1429,7 @@ vellum/workflows/nodes/displayable/final_output_node/tests/test_node.py,sha256=E
 vellum/workflows/nodes/displayable/guardrail_node/__init__.py,sha256=Ab5eXmOoBhyV4dMWdzh32HLUmnPIBEK_zFCT38C4Fng,68
 vellum/workflows/nodes/displayable/guardrail_node/node.py,sha256=h5nIBzQxbXTrdTq1wjDcekk1RV4-rKUNCshqdBAiJJY,4025
 vellum/workflows/nodes/displayable/inline_prompt_node/__init__.py,sha256=gSUOoEZLlrx35-tQhSAd3An8WDwBqyiQh-sIebLU9wU,74
-vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=
+vellum/workflows/nodes/displayable/inline_prompt_node/node.py,sha256=8RXZqWMzViUjFfbpmcy1gkSsKnEpci8BGwsuPYv4xMQ,3380
 vellum/workflows/nodes/displayable/inline_prompt_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/displayable/inline_prompt_node/tests/test_node.py,sha256=Qg1oGXecGH2Hp9oBAY42bTfKHBvNHHoJ6vUPPEj8Lq0,8539
 vellum/workflows/nodes/displayable/merge_node/__init__.py,sha256=J8IC08dSH7P76wKlNuxe1sn7toNGtSQdFirUbtPDEs0,60
@@ -1481,7 +1482,7 @@ vellum/workflows/references/workflow_input.py,sha256=86IuhlBz-9cGxeUzizyjdp482aj
 vellum/workflows/resolvers/__init__.py,sha256=eH6hTvZO4IciDaf_cf7aM2vs-DkBDyJPycOQevJxQnI,82
 vellum/workflows/resolvers/base.py,sha256=WHra9LRtlTuB1jmuNqkfVE2JUgB61Cyntn8f0b0WZg4,411
 vellum/workflows/runner/__init__.py,sha256=i1iG5sAhtpdsrlvwgH6B-m49JsINkiWyPWs8vyT-bqM,72
-vellum/workflows/runner/runner.py,sha256=
+vellum/workflows/runner/runner.py,sha256=pK_cRWS75x_sX7zrnltz3IykCTgoAWA7AW19Ih5BaB4,31759
 vellum/workflows/sandbox.py,sha256=GVJzVjMuYzOBnSrboB0_6MMRZWBluAyQ2o7syeaeBd0,2235
 vellum/workflows/state/__init__.py,sha256=yUUdR-_Vl7UiixNDYQZ-GEM_kJI9dnOia75TtuNEsnE,60
 vellum/workflows/state/base.py,sha256=Vkhneko3VlQrPsMLU1PYSzXU_W1u7_AraJsghiv5O-4,15512
@@ -1517,8 +1518,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
 vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/workflows/tests/test_base_workflow.py,sha256=NRteiICyJvDM5zrtUfq2fZoXcGQVaWC9xmNlLLVW0cU,7979
 vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
+vellum_ai-0.14.18.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.14.18.dist-info/METADATA,sha256=jfwatBq_nGnqnh-_g3EkOaoxQtBIU5O4Gn7n-v3F_do,5408
+vellum_ai-0.14.18.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.14.18.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.14.18.dist-info/RECORD,,
```
vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py
CHANGED
```diff
@@ -19,7 +19,6 @@ _InlinePromptNodeType = TypeVar("_InlinePromptNodeType", bound=InlinePromptNode)
 class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType], Generic[_InlinePromptNodeType]):
     output_id: ClassVar[Optional[UUID]] = None
     array_output_id: ClassVar[Optional[UUID]] = None
-    json_output_id: ClassVar[Optional[UUID]] = None
     prompt_input_ids_by_name: ClassVar[Dict[str, UUID]] = {}

     def serialize(
@@ -33,6 +32,7 @@ class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType],

         _, output_display = display_context.global_node_output_displays[node.Outputs.text]
         _, array_display = display_context.global_node_output_displays[node.Outputs.results]
+        _, json_display = display_context.global_node_output_displays[node.Outputs.json]
         node_blocks = raise_if_descriptor(node.blocks)
         function_definitions = raise_if_descriptor(node.functions)

@@ -71,6 +71,11 @@ class BaseInlinePromptNodeDisplay(BaseNodeVellumDisplay[_InlinePromptNodeType],
             "display_data": self.get_display_data().dict(),
             "base": self.get_base().dict(),
             "definition": self.get_definition().dict(),
+            "outputs": [
+                {"id": str(json_display.id), "name": "json", "type": "JSON", "value": None},
+                {"id": str(output_display.id), "name": "text", "type": "STRING", "value": None},
+                {"id": str(array_display.id), "name": "results", "type": "ARRAY", "value": None},
+            ],
         }

     def _generate_node_and_prompt_inputs(
```
vellum_ee/workflows/display/nodes/vellum/templating_node.py
CHANGED
```diff
@@ -1,5 +1,5 @@
 from uuid import UUID
-from typing import
+from typing import Generic, Optional, TypeVar

 from vellum.workflows.nodes.core.templating_node import TemplatingNode
 from vellum.workflows.types.core import JsonObject
@@ -11,23 +11,21 @@ from vellum_ee.workflows.display.types import WorkflowDisplayContext

 _TemplatingNodeType = TypeVar("_TemplatingNodeType", bound=TemplatingNode)

-TEMPLATE_INPUT_NAME =
+TEMPLATE_INPUT_NAME = TemplatingNode.template.name


 class BaseTemplatingNodeDisplay(BaseNodeVellumDisplay[_TemplatingNodeType], Generic[_TemplatingNodeType]):
-    template_input_id: ClassVar[Optional[UUID]] = None
-
     def serialize(
         self, display_context: WorkflowDisplayContext, error_output_id: Optional[UUID] = None, **kwargs
     ) -> JsonObject:
         node = self._node
         node_id = self.node_id

-        template_input_id = self.
+        template_input_id = self.node_input_ids_by_name.get(TEMPLATE_INPUT_NAME)

         template_node_input = create_node_input(
             node_id=node_id,
-            input_name=
+            input_name=TEMPLATE_INPUT_NAME,
             value=node.template,
             display_context=display_context,
             input_id=template_input_id,
@@ -39,7 +37,8 @@ class BaseTemplatingNodeDisplay(BaseNodeVellumDisplay[_TemplatingNodeType], Gene
                 input_name=variable_name,
                 value=variable_value,
                 display_context=display_context,
-                input_id=self.node_input_ids_by_name.get(variable_name)
+                input_id=self.node_input_ids_by_name.get(f"{TemplatingNode.inputs.name}.{variable_name}")
+                or self.node_input_ids_by_name.get(variable_name),
             )
             for variable_name, variable_value in template_node_inputs.items()
             if variable_name != TEMPLATE_INPUT_NAME
```
vellum_ee/workflows/display/nodes/vellum/tests/test_templating_node.py
ADDED
```diff
@@ -0,0 +1,97 @@
+import pytest
+from uuid import UUID
+from typing import Type
+
+from vellum.workflows import BaseWorkflow
+from vellum.workflows.nodes.core.templating_node.node import TemplatingNode
+from vellum_ee.workflows.display.nodes.vellum.templating_node import BaseTemplatingNodeDisplay
+from vellum_ee.workflows.display.workflows.get_vellum_workflow_display_class import get_workflow_display
+from vellum_ee.workflows.display.workflows.vellum_workflow_display import VellumWorkflowDisplay
+
+
+def _no_display_class(Node: Type[TemplatingNode]):
+    return None
+
+
+def _display_class_with_node_input_ids_by_name(Node: Type[TemplatingNode]):
+    class TemplatingNodeDisplay(BaseTemplatingNodeDisplay[Node]):  # type: ignore[valid-type]
+        node_input_ids_by_name = {"foo": UUID("fba6a4d5-835a-4e99-afb7-f6a4aed15110")}
+
+    return TemplatingNodeDisplay
+
+
+def _display_class_with_node_input_ids_by_name_with_inputs_prefix(Node: Type[TemplatingNode]):
+    class TemplatingNodeDisplay(BaseTemplatingNodeDisplay[Node]):  # type: ignore[valid-type]
+        node_input_ids_by_name = {"inputs.foo": UUID("fba6a4d5-835a-4e99-afb7-f6a4aed15110")}
+
+    return TemplatingNodeDisplay
+
+
+@pytest.mark.parametrize(
+    ["GetDisplayClass", "expected_input_id"],
+    [
+        (_no_display_class, "d3519cec-590c-416d-8eb1-96051aed5ddd"),
+        (_display_class_with_node_input_ids_by_name, "fba6a4d5-835a-4e99-afb7-f6a4aed15110"),
+        (_display_class_with_node_input_ids_by_name_with_inputs_prefix, "fba6a4d5-835a-4e99-afb7-f6a4aed15110"),
+    ],
+    ids=[
+        "no_display_class",
+        "display_class_with_node_input_ids_by_name",
+        "display_class_with_node_input_ids_by_name_with_inputs_prefix",
+    ],
+)
+def test_serialize_node__templating_node_inputs(GetDisplayClass, expected_input_id):
+    # GIVEN a templating node with inputs
+    class MyTemplatingNode(TemplatingNode):
+        inputs = {"foo": "bar"}
+
+    # AND a workflow with the templating node
+    class Workflow(BaseWorkflow):
+        graph = MyTemplatingNode
+
+    # AND a display class
+    GetDisplayClass(MyTemplatingNode)
+
+    # WHEN the workflow is serialized
+    workflow_display = get_workflow_display(base_display_class=VellumWorkflowDisplay, workflow_class=Workflow)
+    serialized_workflow: dict = workflow_display.serialize()
+
+    # THEN the node should properly serialize the inputs
+    my_templating_node = next(
+        node for node in serialized_workflow["workflow_raw_data"]["nodes"] if node["type"] == "TEMPLATING"
+    )
+
+    assert my_templating_node["inputs"] == [
+        {
+            "id": "b7d971ee-9156-46b2-9bf0-9292875211f8",
+            "key": "template",
+            "value": {
+                "combinator": "OR",
+                "rules": [
+                    {
+                        "type": "CONSTANT_VALUE",
+                        "data": {
+                            "type": "STRING",
+                            "value": "",
+                        },
+                    }
+                ],
+            },
+        },
+        {
+            "id": expected_input_id,
+            "key": "foo",
+            "value": {
+                "combinator": "OR",
+                "rules": [
+                    {
+                        "type": "CONSTANT_VALUE",
+                        "data": {
+                            "type": "STRING",
+                            "value": "bar",
+                        },
+                    }
+                ],
+            },
+        },
+    ]
```
vellum_ee/workflows/display/nodes/vellum/utils.py
CHANGED
```diff
@@ -54,7 +54,7 @@ def create_node_input_value_pointer_rules(

    if isinstance(value, BaseDescriptor):
        if isinstance(value, NodeReference):
-            if
+            if value.instance is None:
                raise ValueError(f"Expected NodeReference {value.name} to have an instance")
            value = cast(BaseDescriptor, value.instance)

```
vellum_ee/workflows/display/vellum.py
CHANGED
```diff
@@ -1,11 +1,10 @@
 from dataclasses import dataclass, field
-from enum import Enum
 from uuid import UUID
 from typing import List, Literal, Optional, Union

 from pydantic import Field

-from vellum import
+from vellum import VellumVariableType
 from vellum.client.types.array_vellum_value import ArrayVellumValue
 from vellum.client.types.vellum_value import VellumValue
 from vellum.core import UniversalBaseModel
@@ -138,24 +137,6 @@ class WorkflowOutputVellumDisplay(WorkflowOutputVellumDisplayOverrides):
     pass


-class WorkflowNodeType(str, Enum):
-    PROMPT = "PROMPT"
-    TEMPLATING = "TEMPLATING"
-    NOTE = "NOTE"
-    CODE_EXECUTION = "CODE_EXECUTION"
-    METRIC = "METRIC"
-    SEARCH = "SEARCH"
-    WEBHOOK = "WEBHOOK"
-    MERGE = "MERGE"
-    CONDITIONAL = "CONDITIONAL"
-    API = "API"
-    ENTRYPOINT = "ENTRYPOINT"
-    TERMINAL = "TERMINAL"
-    SUBWORKFLOW = "SUBWORKFLOW"
-    MAP = "MAP"
-    ERROR = "ERROR"
-
-
 class ConstantValuePointer(UniversalBaseModel):
     type: Literal["CONSTANT_VALUE"] = "CONSTANT_VALUE"
     data: VellumValue
@@ -220,131 +201,3 @@ class NodeInput(UniversalBaseModel):
     id: str
     key: str
     value: NodeInputValuePointer
-
-
-class BaseWorkflowNode(UniversalBaseModel):
-    id: str
-    inputs: List[NodeInput]
-    type: str
-    display_data: Optional[NodeDisplayData] = None
-    base: CodeResourceDefinition
-    definition: CodeResourceDefinition
-
-
-class EntrypointNodeData(UniversalBaseModel):
-    source_handle_id: str
-
-
-class EntrypointNode(BaseWorkflowNode):
-    type: Literal[WorkflowNodeType.ENTRYPOINT] = WorkflowNodeType.ENTRYPOINT
-    data: EntrypointNodeData
-
-
-class PromptTemplateBlockData(UniversalBaseModel):
-    version: Literal[1] = 1
-    # blocks: List[PromptBlockRequest]
-
-
-class PromptVersionExecConfig(UniversalBaseModel):
-    parameters: PromptParameters
-    input_variables: List[VellumVariable]
-    prompt_template_block_data: PromptTemplateBlockData
-
-
-class BasePromptNodeData(UniversalBaseModel):
-    label: str
-    output_id: str
-    error_output_id: Optional[str] = None
-    array_output_id: str
-    source_handle_id: str
-    target_handle_id: str
-
-
-class InlinePromptNodeData(BasePromptNodeData):
-    variant: Literal["INLINE"] = "INLINE"
-    exec_config: PromptVersionExecConfig
-    ml_model_name: str
-
-
-class DeploymentPromptNodeData(BasePromptNodeData):
-    variant: Literal["DEPLOYMENT"] = "DEPLOYMENT"
-    deployment_id: str
-    release_tag: str
-
-
-PromptNodeData = Union[
-    InlinePromptNodeData,
-    DeploymentPromptNodeData,
-]
-
-
-class PromptNode(BaseWorkflowNode):
-    type: Literal[WorkflowNodeType.PROMPT] = WorkflowNodeType.PROMPT
-    data: PromptNodeData
-
-
-class SearchNodeData(UniversalBaseModel):
-    label: str
-
-    results_output_id: str
-    text_output_id: str
-    error_output_id: Optional[str] = None
-
-    source_handle_id: str
-    target_handle_id: str
-
-    query_node_input_id: str
-    document_index_node_input_id: str
-    weights_node_input_id: str
-    limit_node_input_id: str
-    separator_node_input_id: str
-    result_merging_enabled_node_input_id: str
-    external_id_filters_node_input_id: str
-    metadata_filters_node_input_id: str
-
-
-class SearchNode(BaseWorkflowNode):
-    type: Literal[WorkflowNodeType.SEARCH] = WorkflowNodeType.SEARCH
-    data: SearchNodeData
-
-
-class FinalOutputNodeData(UniversalBaseModel):
-    label: str
-    name: str
-    target_handle_id: str
-    output_id: str
-    output_type: VellumVariableType
-    node_input_id: str
-
-
-class FinalOutputNode(BaseWorkflowNode):
-    type: Literal[WorkflowNodeType.TERMINAL] = WorkflowNodeType.TERMINAL
-    data: FinalOutputNodeData
-
-
-WorkflowNode = Union[
-    EntrypointNode,
-    PromptNode,
-    SearchNode,
-    FinalOutputNode,
-]
-
-
-class WorkflowEdge(UniversalBaseModel):
-    id: str
-    source_node_id: str
-    source_handle_id: str
-    target_node_id: str
-    target_handle_id: str
-
-
-class WorkflowRawData(UniversalBaseModel):
-    nodes: List[WorkflowNode]
-    edges: List[WorkflowEdge]
-    display_data: Optional[WorkflowDisplayData] = None
-
-
-class WorkflowVersionExecConfig(UniversalBaseModel):
-    workflow_raw_data: WorkflowRawData
-    input_variables: List[VellumVariable]
-    output_variables: List[VellumVariable]
```
vellum_ee/workflows/display/workflows/base_workflow_display.py
CHANGED
```diff
@@ -429,7 +429,7 @@ class BaseWorkflowDisplay(
                )
                subworkflow_display_context = subworkflow_display.get_event_display_context()

-            node_event_displays[
+            node_event_displays[node_id] = NodeEventDisplayContext(
                input_display=input_display,
                output_display=output_display,
                port_display=port_display_meta,
```
vellum_ee/workflows/display/workflows/tests/test_workflow_display.py
CHANGED
```diff
@@ -114,8 +114,8 @@ def test_get_event_display_context__node_display_filled_without_base_display():
     display_context = VellumWorkflowDisplay(MyWorkflow).get_event_display_context()

     # THEN the node display should be included
-    assert
-    node_event_display = display_context.node_displays[
+    assert StartNode.__id__ in display_context.node_displays
+    node_event_display = display_context.node_displays[StartNode.__id__]

     # AND so should their output ids
     assert StartNode.__output_ids__ == node_event_display.output_display
@@ -137,8 +137,8 @@ def test_get_event_display_context__node_display_filled_without_output_display()
     display_context = VellumWorkflowDisplay(MyWorkflow).get_event_display_context()

     # THEN the node display should be included
-    assert
-    node_event_display = display_context.node_displays[
+    assert StartNode.__id__ in display_context.node_displays
+    node_event_display = display_context.node_displays[StartNode.__id__]

     # AND so should their output ids
     assert node_event_display.output_display.keys() == {"foo"}
@@ -163,11 +163,11 @@ def test_get_event_display_context__node_display_to_include_subworkflow_display(
     display_context = VellumWorkflowDisplay(MyWorkflow).get_event_display_context()

     # THEN the subworkflow display should be included
-    assert
-    node_event_display = display_context.node_displays[
+    assert SubworkflowNode.__id__ in display_context.node_displays
+    node_event_display = display_context.node_displays[SubworkflowNode.__id__]

     assert node_event_display.subworkflow_display is not None
-    assert
+    assert InnerNode.__id__ in node_event_display.subworkflow_display.node_displays


 @pytest.mark.parametrize(
@@ -204,13 +204,13 @@ def test_get_event_display_context__node_display_for_adornment_nodes(
     display_context = VellumWorkflowDisplay(MyWorkflow).get_event_display_context()

     # THEN the subworkflow display should be included
-    assert
-    node_event_display = display_context.node_displays[
+    assert adornment_node_id in display_context.node_displays
+    node_event_display = display_context.node_displays[adornment_node_id]
     assert node_event_display.subworkflow_display is not None
-    assert
+    assert inner_node_id in node_event_display.subworkflow_display.node_displays

     # AND the inner node should have the correct outputs
-    inner_node_display = node_event_display.subworkflow_display.node_displays[
+    inner_node_display = node_event_display.subworkflow_display.node_displays[inner_node_id]
     assert inner_node_display.output_display.keys() == {"foo"}
     assert node_event_display.output_display.keys() == expected_adornment_output_names

@@ -231,7 +231,7 @@ def test_get_event_display_context__templating_node_input_display():
     display_context = VellumWorkflowDisplay(MyWorkflow).get_event_display_context()

     # THEN the subworkflow display should be included
-    assert
-    node_event_display = display_context.node_displays[
+    assert MyNode.__id__ in display_context.node_displays
+    node_event_display = display_context.node_displays[MyNode.__id__]

     assert node_event_display.input_display.keys() == {"inputs.foo"}
```
vellum_ee/workflows/tests/test_display_meta.py
CHANGED
```diff
@@ -1,7 +1,7 @@
 import pytest
 import os
 import sys
-from uuid import
+from uuid import uuid4

 from vellum.workflows import BaseWorkflow
 from vellum_ee.workflows.display.workflows import BaseWorkflowDisplay
@@ -35,22 +35,22 @@ def test_base_class_dynamic_import(files):
         "node_displays": {
             "533c6bd8-6088-4abc-a168-8c1758abcd33": {
                 "input_display": {
-                    "example_var_1":
-                    "template":
+                    "example_var_1": "a0d1d7cf-242a-4bd9-a437-d308a7ced9b3",
+                    "template": "f97d721a-e685-498e-90c3-9c3d9358fdad",
                 },
-                "output_display": {"result":
-                "port_display": {"default":
+                "output_display": {"result": "423bc529-1a1a-4f72-af4d-cbdb5f0a5929"},
+                "port_display": {"default": "afda9a19-0618-42e1-9b63-5d0db2a88f62"},
                 "subworkflow_display": None,
             },
             "f3ef4b2b-fec9-4026-9cc6-e5eac295307f": {
-                "input_display": {"node_input":
-                "output_display": {"value":
+                "input_display": {"node_input": "fe6cba85-2423-4b5e-8f85-06311a8be5fb"},
+                "output_display": {"value": "5469b810-6ea6-4362-9e79-e360d44a1405"},
                 "port_display": {},
                 "subworkflow_display": None,
             },
         },
-        "workflow_inputs": {"input_value":
-        "workflow_outputs": {"final_output":
+        "workflow_inputs": {"input_value": "2268a996-bd17-4832-b3ff-f5662d54b306"},
+        "workflow_outputs": {"final_output": "5469b810-6ea6-4362-9e79-e360d44a1405"},
     }
     assert display_meta
-    assert display_meta.
+    assert display_meta.model_dump(mode="json") == expected_result
```
{vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/LICENSE
File without changes

{vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/WHEEL
File without changes

{vellum_ai-0.14.17.dist-info → vellum_ai-0.14.18.dist-info}/entry_points.txt
File without changes