vellum-ai 0.14.40__py3-none-any.whl → 0.14.41__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vellum/client/core/client_wrapper.py +1 -1
- vellum/client/reference.md +138 -1
- vellum/client/resources/ad_hoc/client.py +311 -1
- vellum/client/resources/deployments/client.py +2 -2
- vellum/workflows/nodes/experimental/tool_calling_node/node.py +6 -28
- vellum/workflows/nodes/experimental/tool_calling_node/utils.py +6 -10
- {vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/METADATA +1 -1
- {vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/RECORD +30 -31
- vellum_ee/workflows/display/nodes/base_node_display.py +35 -5
- vellum_ee/workflows/display/nodes/tests/test_base_node_display.py +18 -0
- vellum_ee/workflows/display/nodes/vellum/api_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py +1 -2
- vellum_ee/workflows/display/nodes/vellum/code_execution_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/conditional_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/error_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/final_output_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/guardrail_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/merge_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/note_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py +2 -4
- vellum_ee/workflows/display/nodes/vellum/search_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/templating_node.py +2 -2
- vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py +2 -2
- vellum_ee/workflows/display/workflows/base_workflow_display.py +4 -12
- vellum_ee/workflows/display/nodes/base_node_vellum_display.py +0 -40
- {vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/LICENSE +0 -0
- {vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/WHEEL +0 -0
- {vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/entry_points.txt +0 -0
vellum/client/core/client_wrapper.py
CHANGED
@@ -18,7 +18,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.14.40",
+            "X-Fern-SDK-Version": "0.14.41",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
vellum/client/reference.md
CHANGED
@@ -1206,6 +1206,143 @@ client.submit_workflow_execution_actuals(
 </details>
 
 ## AdHoc
+<details><summary><code>client.ad_hoc.<a href="src/vellum/resources/ad_hoc/client.py">adhoc_execute_prompt</a>(...)</code></summary>
+<dl>
+<dd>
+
+#### 🔌 Usage
+
+<dl>
+<dd>
+
+<dl>
+<dd>
+
+```python
+from vellum import (
+    JinjaPromptBlock,
+    PromptParameters,
+    PromptRequestStringInput,
+    Vellum,
+    VellumVariable,
+)
+
+client = Vellum(
+    api_key="YOUR_API_KEY",
+)
+client.ad_hoc.adhoc_execute_prompt(
+    ml_model="ml_model",
+    input_values=[
+        PromptRequestStringInput(
+            key="key",
+            value="value",
+        )
+    ],
+    input_variables=[
+        VellumVariable(
+            id="id",
+            key="key",
+            type="STRING",
+        )
+    ],
+    parameters=PromptParameters(),
+    blocks=[
+        JinjaPromptBlock(
+            template="template",
+        )
+    ],
+)
+
+```
+</dd>
+</dl>
+</dd>
+</dl>
+
+#### ⚙️ Parameters
+
+<dl>
+<dd>
+
+<dl>
+<dd>
+
+**ml_model:** `str`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**input_values:** `typing.Sequence[PromptRequestInput]`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**input_variables:** `typing.Sequence[VellumVariable]`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**parameters:** `PromptParameters`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**blocks:** `typing.Sequence[PromptBlock]`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**settings:** `typing.Optional[PromptSettings]`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**functions:** `typing.Optional[typing.Sequence[FunctionDefinition]]`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**expand_meta:** `typing.Optional[AdHocExpandMeta]`
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+</dd>
+</dl>
+</dd>
+</dl>
+
+
+</dd>
+</dl>
+</details>
+
 <details><summary><code>client.ad_hoc.<a href="src/vellum/resources/ad_hoc/client.py">adhoc_execute_prompt_stream</a>(...)</code></summary>
 <dl>
 <dd>
@@ -2110,7 +2247,7 @@ client.deployments.update_deployment_release_tag(
 <dl>
 <dd>
 
-**history_item_id:** `typing.Optional[str]` — The ID of the
+**history_item_id:** `typing.Optional[str]` — The ID of the Release to tag
 
 </dd>
 </dl>
vellum/client/resources/ad_hoc/client.py
CHANGED
@@ -13,12 +13,12 @@ from ...core.request_options import RequestOptions
 from ...types.ad_hoc_execute_prompt_event import AdHocExecutePromptEvent
 from ...core.serialization import convert_and_respect_annotation_metadata
 from ...core.pydantic_utilities import parse_obj_as
-import json
 from ...errors.bad_request_error import BadRequestError
 from ...errors.forbidden_error import ForbiddenError
 from ...errors.internal_server_error import InternalServerError
 from json.decoder import JSONDecodeError
 from ...core.api_error import ApiError
+import json
 from ...core.client_wrapper import AsyncClientWrapper
 
 # this is used as the default value for optional parameters
@@ -29,6 +29,157 @@ class AdHocClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
 
+    def adhoc_execute_prompt(
+        self,
+        *,
+        ml_model: str,
+        input_values: typing.Sequence[PromptRequestInput],
+        input_variables: typing.Sequence[VellumVariable],
+        parameters: PromptParameters,
+        blocks: typing.Sequence[PromptBlock],
+        settings: typing.Optional[PromptSettings] = OMIT,
+        functions: typing.Optional[typing.Sequence[FunctionDefinition]] = OMIT,
+        expand_meta: typing.Optional[AdHocExpandMeta] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> AdHocExecutePromptEvent:
+        """
+        Parameters
+        ----------
+        ml_model : str
+
+        input_values : typing.Sequence[PromptRequestInput]
+
+        input_variables : typing.Sequence[VellumVariable]
+
+        parameters : PromptParameters
+
+        blocks : typing.Sequence[PromptBlock]
+
+        settings : typing.Optional[PromptSettings]
+
+        functions : typing.Optional[typing.Sequence[FunctionDefinition]]
+
+        expand_meta : typing.Optional[AdHocExpandMeta]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        AdHocExecutePromptEvent
+
+
+        Examples
+        --------
+        from vellum import (
+            JinjaPromptBlock,
+            PromptParameters,
+            PromptRequestStringInput,
+            Vellum,
+            VellumVariable,
+        )
+
+        client = Vellum(
+            api_key="YOUR_API_KEY",
+        )
+        client.ad_hoc.adhoc_execute_prompt(
+            ml_model="ml_model",
+            input_values=[
+                PromptRequestStringInput(
+                    key="key",
+                    value="value",
+                )
+            ],
+            input_variables=[
+                VellumVariable(
+                    id="id",
+                    key="key",
+                    type="STRING",
+                )
+            ],
+            parameters=PromptParameters(),
+            blocks=[
+                JinjaPromptBlock(
+                    template="template",
+                )
+            ],
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "v1/ad-hoc/execute-prompt",
+            base_url=self._client_wrapper.get_environment().default,
+            method="POST",
+            json={
+                "ml_model": ml_model,
+                "input_values": convert_and_respect_annotation_metadata(
+                    object_=input_values, annotation=typing.Sequence[PromptRequestInput], direction="write"
+                ),
+                "input_variables": convert_and_respect_annotation_metadata(
+                    object_=input_variables, annotation=typing.Sequence[VellumVariable], direction="write"
+                ),
+                "parameters": convert_and_respect_annotation_metadata(
+                    object_=parameters, annotation=PromptParameters, direction="write"
+                ),
+                "settings": convert_and_respect_annotation_metadata(
+                    object_=settings, annotation=PromptSettings, direction="write"
+                ),
+                "blocks": convert_and_respect_annotation_metadata(
+                    object_=blocks, annotation=typing.Sequence[PromptBlock], direction="write"
+                ),
+                "functions": convert_and_respect_annotation_metadata(
+                    object_=functions, annotation=typing.Sequence[FunctionDefinition], direction="write"
+                ),
+                "expand_meta": convert_and_respect_annotation_metadata(
+                    object_=expand_meta, annotation=AdHocExpandMeta, direction="write"
+                ),
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    AdHocExecutePromptEvent,
+                    parse_obj_as(
+                        type_=AdHocExecutePromptEvent,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 400:
+                raise BadRequestError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 403:
+                raise ForbiddenError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 500:
+                raise InternalServerError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     def adhoc_execute_prompt_stream(
         self,
         *,
@@ -195,6 +346,165 @@ class AsyncAdHocClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper
 
+    async def adhoc_execute_prompt(
+        self,
+        *,
+        ml_model: str,
+        input_values: typing.Sequence[PromptRequestInput],
+        input_variables: typing.Sequence[VellumVariable],
+        parameters: PromptParameters,
+        blocks: typing.Sequence[PromptBlock],
+        settings: typing.Optional[PromptSettings] = OMIT,
+        functions: typing.Optional[typing.Sequence[FunctionDefinition]] = OMIT,
+        expand_meta: typing.Optional[AdHocExpandMeta] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> AdHocExecutePromptEvent:
+        """
+        Parameters
+        ----------
+        ml_model : str
+
+        input_values : typing.Sequence[PromptRequestInput]
+
+        input_variables : typing.Sequence[VellumVariable]
+
+        parameters : PromptParameters
+
+        blocks : typing.Sequence[PromptBlock]
+
+        settings : typing.Optional[PromptSettings]
+
+        functions : typing.Optional[typing.Sequence[FunctionDefinition]]
+
+        expand_meta : typing.Optional[AdHocExpandMeta]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        AdHocExecutePromptEvent
+
+
+        Examples
+        --------
+        import asyncio
+
+        from vellum import (
+            AsyncVellum,
+            JinjaPromptBlock,
+            PromptParameters,
+            PromptRequestStringInput,
+            VellumVariable,
+        )
+
+        client = AsyncVellum(
+            api_key="YOUR_API_KEY",
+        )
+
+
+        async def main() -> None:
+            await client.ad_hoc.adhoc_execute_prompt(
+                ml_model="ml_model",
+                input_values=[
+                    PromptRequestStringInput(
+                        key="key",
+                        value="value",
+                    )
+                ],
+                input_variables=[
+                    VellumVariable(
+                        id="id",
+                        key="key",
+                        type="STRING",
+                    )
+                ],
+                parameters=PromptParameters(),
+                blocks=[
+                    JinjaPromptBlock(
+                        template="template",
+                    )
+                ],
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "v1/ad-hoc/execute-prompt",
+            base_url=self._client_wrapper.get_environment().default,
+            method="POST",
+            json={
+                "ml_model": ml_model,
+                "input_values": convert_and_respect_annotation_metadata(
+                    object_=input_values, annotation=typing.Sequence[PromptRequestInput], direction="write"
+                ),
+                "input_variables": convert_and_respect_annotation_metadata(
+                    object_=input_variables, annotation=typing.Sequence[VellumVariable], direction="write"
+                ),
+                "parameters": convert_and_respect_annotation_metadata(
+                    object_=parameters, annotation=PromptParameters, direction="write"
+                ),
+                "settings": convert_and_respect_annotation_metadata(
+                    object_=settings, annotation=PromptSettings, direction="write"
+                ),
+                "blocks": convert_and_respect_annotation_metadata(
+                    object_=blocks, annotation=typing.Sequence[PromptBlock], direction="write"
+                ),
+                "functions": convert_and_respect_annotation_metadata(
+                    object_=functions, annotation=typing.Sequence[FunctionDefinition], direction="write"
+                ),
+                "expand_meta": convert_and_respect_annotation_metadata(
+                    object_=expand_meta, annotation=AdHocExpandMeta, direction="write"
+                ),
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    AdHocExecutePromptEvent,
+                    parse_obj_as(
+                        type_=AdHocExecutePromptEvent,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 400:
+                raise BadRequestError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 403:
+                raise ForbiddenError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 500:
+                raise InternalServerError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     async def adhoc_execute_prompt_stream(
         self,
         *,
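The hunks above add a blocking `adhoc_execute_prompt` alongside the existing `adhoc_execute_prompt_stream` on both the sync and async clients. Below is a minimal sketch of choosing between the two; the argument values are placeholders, and the assumption that the streaming variant accepts the same keyword arguments and yields `AdHocExecutePromptEvent` objects is based on the shared parameter list shown in this diff, not on the full generated client.

```python
from vellum import (
    JinjaPromptBlock,
    PromptParameters,
    PromptRequestStringInput,
    Vellum,
    VellumVariable,
)

client = Vellum(api_key="YOUR_API_KEY")

# Shared request arguments, mirroring the generated example in reference.md above.
request = dict(
    ml_model="gpt-4o-mini",
    input_values=[PromptRequestStringInput(key="question", value="What is Vellum?")],
    input_variables=[VellumVariable(id="1", key="question", type="STRING")],
    parameters=PromptParameters(),
    blocks=[JinjaPromptBlock(template="{{ question }}")],
)

# New in 0.14.41: a single AdHocExecutePromptEvent returned from one blocking call.
event = client.ad_hoc.adhoc_execute_prompt(**request)
print(event)

# Pre-existing streaming variant: iterate events as they arrive (assumed iterator return).
for streamed_event in client.ad_hoc.adhoc_execute_prompt_stream(**request):
    print(streamed_event)
```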
vellum/client/resources/deployments/client.py
CHANGED
@@ -351,7 +351,7 @@ class DeploymentsClient:
             The name of the Release Tag associated with this Deployment that you'd like to update.
 
         history_item_id : typing.Optional[str]
-            The ID of the
+            The ID of the Release to tag
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -895,7 +895,7 @@ class AsyncDeploymentsClient:
             The name of the Release Tag associated with this Deployment that you'd like to update.
 
         history_item_id : typing.Optional[str]
-            The ID of the
+            The ID of the Release to tag
 
         request_options : typing.Optional[RequestOptions]
            Request-specific configuration.
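For context on the docstring fix above, here is a hedged sketch of the call it documents. Only `history_item_id` and its new description come from this diff; the remaining arguments are assumptions drawn from the surrounding docstring, and the real generated signature may differ.

```python
from vellum import Vellum

client = Vellum(api_key="YOUR_API_KEY")

client.deployments.update_deployment_release_tag(
    "DEPLOYMENT_ID",               # assumed: the Deployment whose tag is updated
    "my-release-tag",              # assumed: the Release Tag name mentioned in the docstring
    history_item_id="RELEASE_ID",  # per 0.14.41: "The ID of the Release to tag"
)
```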
vellum/workflows/nodes/experimental/tool_calling_node/node.py
CHANGED
@@ -1,8 +1,7 @@
 from collections.abc import Callable
-from typing import Any, ClassVar,
+from typing import Any, ClassVar, List, Optional
 
-from vellum import ChatMessage,
-from vellum.client.types.chat_message_request import ChatMessageRequest
+from vellum import ChatMessage, PromptBlock
 from vellum.workflows.context import execution_context, get_parent_context
 from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
@@ -35,8 +34,7 @@ class ToolCallingNode(BaseNode):
 
     ml_model: ClassVar[str] = "gpt-4o-mini"
     blocks: ClassVar[List[PromptBlock]] = []
-    functions: ClassVar[List[
-    function_callables: ClassVar[Dict[str, Callable[..., Any]]] = {}
+    functions: ClassVar[List[Callable[..., Any]]] = []
     prompt_inputs: ClassVar[Optional[EntityInputsInterface]] = None
     # TODO: https://linear.app/vellum/issue/APO-342/support-tool-call-max-retries
     max_tool_calls: ClassVar[int] = 1
@@ -59,27 +57,13 @@ class ToolCallingNode(BaseNode):
         This dynamically builds a graph with router and function nodes,
         then executes the workflow.
         """
-        self._validate_functions()
-
-        initial_chat_history = []
-
-        # Extract chat history from prompt inputs if available
-        if self.prompt_inputs and "chat_history" in self.prompt_inputs:
-            chat_history_input = self.prompt_inputs["chat_history"]
-            if isinstance(chat_history_input, list) and all(
-                isinstance(msg, (ChatMessage, ChatMessageRequest)) for msg in chat_history_input
-            ):
-                initial_chat_history = [
-                    msg if isinstance(msg, ChatMessage) else ChatMessage.model_validate(msg.model_dump())
-                    for msg in chat_history_input
-                ]
 
         self._build_graph()
 
         with execution_context(parent_context=get_parent_context()):
 
             class ToolCallingState(BaseState):
-                chat_history: List[ChatMessage] =
+                chat_history: List[ChatMessage] = []
 
             class ToolCallingWorkflow(BaseWorkflow[BaseInputs, ToolCallingState]):
                 graph = self._graph
@@ -121,9 +105,8 @@ class ToolCallingNode(BaseNode):
         )
 
         self._function_nodes = {
-            function.
+            function.__name__: create_function_node(
                 function=function,
-                function_callable=cast(Callable[..., Any], self.function_callables[function.name]),  # type: ignore
             )
             for function in self.functions
         }
@@ -132,7 +115,7 @@ class ToolCallingNode(BaseNode):
 
         # Add connections from ports of router to function nodes and back to router
         for function_name, FunctionNodeClass in self._function_nodes.items():
-            router_port = getattr(self.tool_router_node.Ports, function_name)
+            router_port = getattr(self.tool_router_node.Ports, function_name)
             edge_graph = router_port >> FunctionNodeClass >> self.tool_router_node
             graph_set.add(edge_graph)
 
@@ -140,8 +123,3 @@ class ToolCallingNode(BaseNode):
         graph_set.add(default_port)
 
         self._graph = Graph.from_set(graph_set)
-
-    def _validate_functions(self) -> None:
-        for function in self.functions:
-            if function.name is None:
-                raise ValueError("Function name is required")
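Taken together, these node.py changes replace the `FunctionDefinition`-plus-`function_callables` pair with a single list of plain Python callables. A minimal sketch of the resulting usage is below, assuming `ToolCallingNode` is importable from the experimental package listed in the RECORD later in this diff and leaving `blocks` and `prompt_inputs` at their defaults:

```python
from vellum.workflows.nodes.experimental.tool_calling_node import ToolCallingNode


def get_current_weather(location: str, unit: str) -> str:
    """Return a canned weather report for the given location."""
    return f"It is sunny in {location} (70 degrees {unit})."


class WeatherToolCallingNode(ToolCallingNode):
    ml_model = "gpt-4o-mini"           # default shown in the diff above
    functions = [get_current_weather]  # plain callables; router ports are keyed by __name__
```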
vellum/workflows/nodes/experimental/tool_calling_node/utils.py
CHANGED
@@ -35,7 +35,7 @@ class ToolRouterNode(InlinePromptNode):
         if function_call is not None:
             self.state.chat_history.append(
                 ChatMessage(
-                    role="
+                    role="ASSISTANT",
                     content=FunctionCallChatMessageContent(
                         value=FunctionCallChatMessageContentValue(
                             name=function_call.name,
@@ -51,16 +51,12 @@ class ToolRouterNode(InlinePromptNode):
 def create_tool_router_node(
     ml_model: str,
     blocks: List[PromptBlock],
-    functions: List[
+    functions: List[Callable[..., Any]],
     prompt_inputs: Optional[EntityInputsInterface],
 ) -> Type[ToolRouterNode]:
     Ports = type("Ports", (), {})
     for function in functions:
-
-        # We should not raise an error here since we filter out functions without names
-        raise ValueError("Function name is required")
-
-        function_name = function.name
+        function_name = function.__name__
         port_condition = LazyReference(
             lambda: (
                 ToolRouterNode.Outputs.results[0]["type"].equals("FUNCTION_CALL")
@@ -98,7 +94,7 @@ def create_tool_router_node(
     return node
 
 
-def create_function_node(function:
+def create_function_node(function: Callable[..., Any]) -> Type[FunctionNode]:
     """
     Create a FunctionNode class for a given function.
 
@@ -113,14 +109,14 @@ def create_function_node(function: FunctionDefinition, function_callable: Callab
             arguments = outputs["arguments"]
 
             # Call the original function directly with the arguments
-            result =
+            result = function(**arguments)
 
             self.state.chat_history.append(ChatMessage(role="FUNCTION", text=result))
 
             return self.Outputs()
 
     node = type(
-        f"FunctionNode_{function.
+        f"FunctionNode_{function.__name__}",
         (FunctionNode,),
         {
             "function": function,
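The utils.py changes follow the same shift: router ports and function nodes are now derived from a callable's `__name__`, and the generated node simply calls the function with the parsed arguments. The snippet below is illustrative only, reducing the `type()`-based class synthesis used by `create_function_node` to plain Python so it runs standalone; the real implementation subclasses `FunctionNode` and appends the result to the workflow chat history.

```python
def make_function_node(function):
    """Synthesize a tiny class per callable, named after the callable's __name__."""

    def exec_function(self, arguments):
        # Call the original function directly with the parsed arguments
        return function(**arguments)

    return type(
        f"FunctionNode_{function.__name__}",
        (object,),
        {"function": staticmethod(function), "exec_function": exec_function},
    )


def add(a: int, b: int) -> int:
    return a + b


AddNode = make_function_node(add)
assert AddNode.__name__ == "FunctionNode_add"
assert AddNode().exec_function({"a": 1, "b": 2}) == 3
```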
{vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/RECORD
CHANGED
@@ -26,31 +26,30 @@ vellum_ee/workflows/display/base.py,sha256=EqlQFD56kpqMY02ZBJBQajzJKh33Dwi60Wo77
 vellum_ee/workflows/display/editor/__init__.py,sha256=MSAgY91xCEg2neH5d8jXx5wRdR962ftZVa6vO9BGq9k,167
 vellum_ee/workflows/display/editor/types.py,sha256=x-tOOCJ6CF4HmiKDfCmcc3bOVfc1EBlP5o6u5WEfLoY,567
 vellum_ee/workflows/display/nodes/__init__.py,sha256=jI1aPBQf8DkmrYoZ4O-wR1duqZByOf5mDFmo_wFJPE4,307
-vellum_ee/workflows/display/nodes/base_node_display.py,sha256=
-vellum_ee/workflows/display/nodes/base_node_vellum_display.py,sha256=WXbfVoGHTfl_owas2WKHwcRhfLeu0cAI4qLmKxfxjOs,1707
+vellum_ee/workflows/display/nodes/base_node_display.py,sha256=zCcCEzpTLXOWbe9zGFF_mWmouJ9zwnTY0dkjepbuZrs,22269
 vellum_ee/workflows/display/nodes/get_node_display_class.py,sha256=5QuXpMth0HmZuC-e8LRKOLbrVXSL-ylMR5IWae8eNmc,2113
 vellum_ee/workflows/display/nodes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum_ee/workflows/display/nodes/tests/test_base_node_display.py,sha256=
+vellum_ee/workflows/display/nodes/tests/test_base_node_display.py,sha256=Z4Mf7xLCNiblSbpKI0BrV5modQr-ZcFzhfir_OSyTTs,2997
 vellum_ee/workflows/display/nodes/types.py,sha256=St1BB6no528OyELGiyRabWao0GGw6mLhstQAvEACbGk,247
 vellum_ee/workflows/display/nodes/utils.py,sha256=sloya5TpXsnot1HURc9L51INwflRqUzHxRVnCS9Cd-4,973
 vellum_ee/workflows/display/nodes/vellum/__init__.py,sha256=nUIgH2s0-7IbQRNrBhLPyRNe8YIrx3Yo9HeeW-aXXFk,1668
-vellum_ee/workflows/display/nodes/vellum/api_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/code_execution_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/conditional_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/error_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/final_output_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/guardrail_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/api_node.py,sha256=ke2_t-E6BdsbZlYgAQCxCqbM01owtgKmI1H62AYGWw4,8519
+vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py,sha256=rJbHZBg9A_v2bjk-R6MfWzShcrS2gcKIOyYGoqwTx8s,6353
+vellum_ee/workflows/display/nodes/vellum/code_execution_node.py,sha256=NMlgXgglewZ-k7DolHDAcUnOK68EetmI8EjhYI3ciRg,4391
+vellum_ee/workflows/display/nodes/vellum/conditional_node.py,sha256=slsDIxkuKD2b3XptcGhd8aCKrvYqUSeNmkeCJ23CXXY,11085
+vellum_ee/workflows/display/nodes/vellum/error_node.py,sha256=eJ_nw0u63vJ7fxxMCrfNyKx8JTOcRoujrJsvsxo_FJ4,2167
+vellum_ee/workflows/display/nodes/vellum/final_output_node.py,sha256=WL5At9pRRODV9kOZnY2HtF1zkf7tig7O-x1BhPtI9S8,3109
+vellum_ee/workflows/display/nodes/vellum/guardrail_node.py,sha256=Mc2QilBaGWinOoNItVkkeNkIrV2Yn8vaLN83z8ajC_0,2209
+vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py,sha256=uprXxAph7oavh3ARupcZZ8Rvwv1KtzyUP-x0cSzWJC0,8824
+vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py,sha256=5P5Y3fkRo9o7ITONY_T3Wibfx03z5BopNl7NG19hEOM,5491
 vellum_ee/workflows/display/nodes/vellum/map_node.py,sha256=8CPnn06HIBxBOiECevUffeVmQmCpec6WtPQnNl9gj9Y,3748
-vellum_ee/workflows/display/nodes/vellum/merge_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/note_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/merge_node.py,sha256=Qirs9cWx7LXKXgUtZBncZ1gou3EIMZ1m9146t9RCC3o,3213
+vellum_ee/workflows/display/nodes/vellum/note_node.py,sha256=3E0UqmgVYdtbj4nyq8gKju8EpMsRHMCQ0KLrJhug3XU,1084
+vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py,sha256=mkvc8CPts1d9CZkubT1eg75grQjrmgngMy9Ll99a9ds,3238
 vellum_ee/workflows/display/nodes/vellum/retry_node.py,sha256=TjBpRERmkmm1myZTWKAxxD1F0dWSc7U1sih8g9sqq2Q,3300
-vellum_ee/workflows/display/nodes/vellum/search_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py,sha256=
-vellum_ee/workflows/display/nodes/vellum/templating_node.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/search_node.py,sha256=BgphkaSEeUeNXu71mSse-d0QHtXs6989BKEe9OtceGw,9260
+vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py,sha256=GI2d9W0S72NxZykdyCHDoCfgpiVye7g9fG3wcQ01O8Q,2593
+vellum_ee/workflows/display/nodes/vellum/templating_node.py,sha256=otA0D2vXnfNIktjp4RvWL5pzsqSQiPeCinJy7weTg9g,3223
 vellum_ee/workflows/display/nodes/vellum/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum_ee/workflows/display/nodes/vellum/tests/test_code_execution_node.py,sha256=OoNO-BstB96F-VMK6FZ9aXyi-0wyNePo6HiaJC6SYRw,3880
 vellum_ee/workflows/display/nodes/vellum/tests/test_error_node.py,sha256=HoIph_rNlHFNBMqBq1a_eyFwPzBIazdH1yQeRhgba14,1510
@@ -59,7 +58,7 @@ vellum_ee/workflows/display/nodes/vellum/tests/test_prompt_node.py,sha256=6Q6rfE
 vellum_ee/workflows/display/nodes/vellum/tests/test_retry_node.py,sha256=h93ysolmbo2viisyhRnXKHPxiDK0I_dSAbYoHFYIoO4,1953
 vellum_ee/workflows/display/nodes/vellum/tests/test_templating_node.py,sha256=PK7v0AWCgH9gWRfAwGPUGCRLbIH3NSyoAYa9W15ggAo,3321
 vellum_ee/workflows/display/nodes/vellum/tests/test_try_node.py,sha256=Khjsb53PKpZuyhKoRMgKAL45eGp5hZqXvHmVeQWRw4w,2289
-vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py,sha256=
+vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py,sha256=3LS1O4DGPWit05oj_ubeW8AlHGnoBxdUMferGQuAiZs,4851
 vellum_ee/workflows/display/nodes/vellum/try_node.py,sha256=o6wL17XVm6Wj4sHoJkBW54Y5fY9ZwhD7ry0xkNWRu5c,4106
 vellum_ee/workflows/display/nodes/vellum/utils.py,sha256=oICunzyaXPs0tYnW5zH1r93Bx35MSH7mcD-n0DEWRok,4978
 vellum_ee/workflows/display/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -98,7 +97,7 @@ vellum_ee/workflows/display/utils/registry.py,sha256=fWIm5Jj-10gNFjgn34iBu4RWv3V
 vellum_ee/workflows/display/utils/vellum.py,sha256=VSWB3RA3RWQwQ7nMsU9gLfgpX3_BK9ARBIT36i52r9k,9478
 vellum_ee/workflows/display/vellum.py,sha256=o7mq_vk2Yapu9DDKRz5l76h8EmCAypWGQYe6pryrbB8,3576
 vellum_ee/workflows/display/workflows/__init__.py,sha256=kapXsC67VJcgSuiBMa86FdePG5A9kMB5Pi4Uy1O2ob4,207
-vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=
+vellum_ee/workflows/display/workflows/base_workflow_display.py,sha256=qjdATqHawQ1IWkyQ3aRlQQAFixNsS-X4ewpmKSnSwvI,32001
 vellum_ee/workflows/display/workflows/get_vellum_workflow_display_class.py,sha256=s6yMO0RxoM8scmfT8TJ-9cwl-WHFe7JSyEJA0alCeEs,1913
 vellum_ee/workflows/display/workflows/tests/test_workflow_display.py,sha256=uWBMmd_2BlDpymGoo5FB2LBeFQKHmPWJAPvSPZHk94o,11869
 vellum_ee/workflows/display/workflows/vellum_workflow_display.py,sha256=aaKdmWrgEe5YyV4zuDY_4E3y-l59rIHQnNGiPj2OWxQ,359
@@ -131,7 +130,7 @@ vellum/client/README.md,sha256=JkCJjmMZl4jrPj46pkmL9dpK4gSzQQmP5I7z4aME4LY,4749
 vellum/client/__init__.py,sha256=Z-JHK2jGxhtTtmkLeOaUGGJWIUNYGNVBLvUewC6lp6w,118148
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=
+vellum/client/core/client_wrapper.py,sha256=woJ1T678VkPNsh73oqH8PD5x3z8EX9S3qLVPszexkzc,1869
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=X9IbmkZmB2bB_DpmZAO3crWdXagOakAyn6UCOCImCPg,2322
 vellum/client/core/http_client.py,sha256=R0pQpCppnEtxccGvXl4uJ76s7ro_65Fo_erlNNLp_AI,19228
@@ -147,14 +146,14 @@ vellum/client/errors/bad_request_error.py,sha256=_EbO8mWqN9kFZPvIap8qa1lL_EWkRcs
 vellum/client/errors/forbidden_error.py,sha256=QO1kKlhClAPES6zsEK7g9pglWnxn3KWaOCAawWOg6Aw,263
 vellum/client/errors/internal_server_error.py,sha256=8USCagXyJJ1MOm9snpcXIUt6eNXvrd_aq7Gfcu1vlOI,268
 vellum/client/errors/not_found_error.py,sha256=tBVCeBC8n3C811WHRj_n-hs3h8MqwR5gp0vLiobk7W8,262
-vellum/client/reference.md,sha256=
+vellum/client/reference.md,sha256=Nue7qK61ewnH9QTLxLOm9GLGPdBRqiWz1PTerxBiDyI,89717
 vellum/client/resources/__init__.py,sha256=XgQao4rJxyYu71j64RFIsshz4op9GE8-i-C5GCv-KVE,1555
 vellum/client/resources/ad_hoc/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-vellum/client/resources/ad_hoc/client.py,sha256=
+vellum/client/resources/ad_hoc/client.py,sha256=93FXK-Wpvh72G8ji2__2Dmc5OYl9G5GRHiknyGIjeX4,25557
 vellum/client/resources/container_images/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/client/resources/container_images/client.py,sha256=g8X3IkWFj43-sJMvirjDrh1-yjOFYV2C9xsGC20Xfjo,15003
 vellum/client/resources/deployments/__init__.py,sha256=m64MNuPx3qVazOnTNwOY8oEeDrAkNwMJvUEe5xoMDvs,239
-vellum/client/resources/deployments/client.py,sha256=
+vellum/client/resources/deployments/client.py,sha256=_sK5DgxGTksPDDWONoOrNHpYcNyk6YuoFuwE9CKQm70,38745
 vellum/client/resources/deployments/types/__init__.py,sha256=29GVdoLOJsADSSSqZwb6CQPeEmPjkKrbsWfru1bemj8,321
 vellum/client/resources/deployments/types/deployments_list_request_status.py,sha256=CxlQD16KZXme7x31YYCe_3aAgEueutDTeJo5A4Au-aU,174
 vellum/client/resources/deployments/types/list_deployment_release_tags_request_source.py,sha256=hRGgWMYZL9uKCmD_2dU8-u9RCPUUGItpNn1tUY-NXKY,180
@@ -1632,8 +1631,8 @@ vellum/workflows/nodes/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQ
 vellum/workflows/nodes/experimental/openai_chat_completion_node/__init__.py,sha256=lsyD9laR9p7kx5-BXGH2gUTM242UhKy8SMV0SR6S2iE,90
 vellum/workflows/nodes/experimental/openai_chat_completion_node/node.py,sha256=cKI2Ls25L-JVt4z4a2ozQa-YBeVy21Z7BQ32Sj7iBPE,10460
 vellum/workflows/nodes/experimental/tool_calling_node/__init__.py,sha256=S7OzT3I4cyOU5Beoz87nPwCejCMP2FsHBFL8OcVmxJ4,118
-vellum/workflows/nodes/experimental/tool_calling_node/node.py,sha256=
-vellum/workflows/nodes/experimental/tool_calling_node/utils.py,sha256=
+vellum/workflows/nodes/experimental/tool_calling_node/node.py,sha256=w73v2pfpqFaTOYbtHkH7nFuGgqQebLO6dr_Mxn1n7Dc,4848
+vellum/workflows/nodes/experimental/tool_calling_node/utils.py,sha256=hwJ1GjoNOEWoCdT1R0b6gVRAAGWGTUlKutTgBF7GRP4,4664
 vellum/workflows/nodes/mocks.py,sha256=a1FjWEIocseMfjzM-i8DNozpUsaW0IONRpZmXBoWlyc,10455
 vellum/workflows/nodes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/tests/test_mocks.py,sha256=mfPvrs75PKcsNsbJLQAN6PDFoVqs9TmQxpdyFKDdO60,7837
@@ -1698,8 +1697,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
 vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/workflows/tests/test_base_workflow.py,sha256=8P5YIsNMO78_CR1NNK6wkEdkMB4b3Q_Ni1qxh78OnHo,20481
 vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
-vellum_ai-0.14.
+vellum_ai-0.14.41.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.14.41.dist-info/METADATA,sha256=KZufZ6Z5vdimywz8zeA6QW6OMaIa82TopA1biRjbVqE,5484
+vellum_ai-0.14.41.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.14.41.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.14.41.dist-info/RECORD,,
vellum_ee/workflows/display/nodes/base_node_display.py
CHANGED
@@ -49,7 +49,7 @@ from vellum.workflows.expressions.or_ import OrExpression
 from vellum.workflows.expressions.parse_json import ParseJsonExpression
 from vellum.workflows.nodes.bases.base import BaseNode
 from vellum.workflows.nodes.displayable.bases.utils import primitive_to_vellum_value
-from vellum.workflows.nodes.utils import get_wrapped_node
+from vellum.workflows.nodes.utils import get_unadorned_node, get_wrapped_node
 from vellum.workflows.ports import Port
 from vellum.workflows.references import OutputReference
 from vellum.workflows.references.constant import ConstantValueReference
@@ -64,7 +64,7 @@ from vellum.workflows.types.utils import get_original_base
 from vellum.workflows.utils.names import pascal_to_title_case
 from vellum.workflows.utils.uuids import uuid4_from_hash
 from vellum.workflows.utils.vellum_variables import primitive_type_to_vellum_variable_type
-from vellum_ee.workflows.display.editor.types import NodeDisplayData
+from vellum_ee.workflows.display.editor.types import NodeDisplayComment, NodeDisplayData
 from vellum_ee.workflows.display.nodes.get_node_display_class import get_node_display_class
 from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay, PortDisplay, PortDisplayOverrides
 from vellum_ee.workflows.display.utils.exceptions import UnsupportedSerializationException
@@ -223,7 +223,7 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
             "id": str(node_id),
             "label": node.__qualname__,
             "type": "GENERIC",
-            "display_data": self.
+            "display_data": self.get_display_data().dict(),
             "base": self.get_base().dict(),
             "definition": self.get_definition().dict(),
             "trigger": {
@@ -276,6 +276,13 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
 
         return PortDisplay(id=port_id, node_id=self.node_id)
 
+    def get_source_handle_id(self, port_displays: Dict[Port, PortDisplay]) -> UUID:
+        unadorned_node = get_unadorned_node(self._node)
+        default_port = unadorned_node.Ports.default
+
+        default_port_display = port_displays[default_port]
+        return default_port_display.id
+
     def get_trigger_id(self) -> UUID:
         return self.get_target_handle_id()
 
@@ -363,9 +370,32 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
 
         register_node_display_class(node_class=node_class, node_display_class=cls)
 
-    def
+    def get_display_data(self) -> NodeDisplayData:
         explicit_value = self._get_explicit_node_display_attr("display_data", NodeDisplayData)
-
+        docstring = self._node.__doc__
+
+        if explicit_value and explicit_value.comment and docstring:
+            comment = (
+                NodeDisplayComment(value=docstring, expanded=explicit_value.comment.expanded)
+                if explicit_value.comment.expanded
+                else NodeDisplayComment(value=docstring)
+            )
+            return NodeDisplayData(
+                position=explicit_value.position,
+                width=explicit_value.width,
+                height=explicit_value.height,
+                comment=comment,
+            )
+
+        if explicit_value:
+            return explicit_value
+
+        if docstring:
+            return NodeDisplayData(
+                comment=NodeDisplayComment(value=docstring),
+            )
+
+        return NodeDisplayData()
 
     def serialize_condition(self, display_context: "WorkflowDisplayContext", condition: BaseDescriptor) -> JsonObject:
         if isinstance(
vellum_ee/workflows/display/nodes/tests/test_base_node_display.py
CHANGED
@@ -82,3 +82,21 @@ def test_serialize_condition__accessor_expression():
             },
         }
     ]
+
+
+def test_serialize_display_data():
+    # GIVEN a node with an accessor expression in a Port
+    class MyNode(BaseNode):
+        """I hope this works"""
+
+        pass
+
+    # WHEN we serialize the node
+    node_display_class = get_node_display_class(MyNode)
+    data = node_display_class().serialize(WorkflowDisplayContext())
+
+    # THEN the condition should be serialized correctly
+    assert data["display_data"] == {
+        "position": {"x": 0.0, "y": 0.0},
+        "comment": {"value": "I hope this works"},
+    }
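The new test covers the docstring-only case. Below is a companion sketch of the other branch of `get_display_data`: when a display class also sets an explicit `display_data` with an expanded comment, the docstring supplies the comment text while the explicit position, size, and expanded flag are preserved. It assumes `display_data` can still be set as a class attribute on the display class, as it could on the removed `BaseNodeVellumDisplay`.

```python
from vellum.workflows.nodes.bases import BaseNode
from vellum_ee.workflows.display.editor.types import NodeDisplayComment, NodeDisplayData
from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay


class DocumentedNode(BaseNode):
    """Summarize the user's request."""


class DocumentedNodeDisplay(BaseNodeDisplay[DocumentedNode]):
    display_data = NodeDisplayData(comment=NodeDisplayComment(value="placeholder", expanded=True))


display_data = DocumentedNodeDisplay().get_display_data()
# Expected per the get_display_data logic shown above: the comment text comes from the
# node docstring, while the expanded flag carries over from the explicit value.
print(display_data.comment)
```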
vellum_ee/workflows/display/nodes/vellum/api_node.py
CHANGED
@@ -4,7 +4,7 @@ from typing import Any, ClassVar, Dict, Generic, Optional, TypeVar, cast
 from vellum.workflows.nodes.displayable import APINode
 from vellum.workflows.references.output import OutputReference
 from vellum.workflows.types.core import JsonArray, JsonObject
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -13,7 +13,7 @@ from vellum_ee.workflows.display.utils.vellum import WorkspaceSecretPointer
 _APINodeType = TypeVar("_APINodeType", bound=APINode)
 
 
-class BaseAPINodeDisplay(
+class BaseAPINodeDisplay(BaseNodeDisplay[_APINodeType], Generic[_APINodeType]):
     # A mapping between node input keys and their ids for inputs representing additional header keys
     additional_header_key_input_ids: ClassVar[Optional[Dict[str, UUID]]] = None
 
vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py
CHANGED
@@ -10,7 +10,6 @@ from vellum.workflows.types.core import JsonArray, JsonObject
 from vellum.workflows.types.utils import get_original_base
 from vellum.workflows.utils.uuids import uuid4_from_hash
 from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
-from vellum_ee.workflows.display.nodes.base_node_vellum_display import BaseNodeVellumDisplay
 from vellum_ee.workflows.display.nodes.get_node_display_class import get_node_display_class
 from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -72,7 +71,7 @@ def _recursively_replace_wrapped_node(node_class: Type[BaseNode], wrapped_node_d
     )
 
 
-class BaseAdornmentNodeDisplay(
+class BaseAdornmentNodeDisplay(BaseNodeDisplay[_BaseAdornmentNodeType], Generic[_BaseAdornmentNodeType]):
     __wrapped_node_display__: Optional[Type[BaseNodeDisplay]] = None
 
     def serialize(
vellum_ee/workflows/display/nodes/vellum/code_execution_node.py
CHANGED
@@ -6,7 +6,7 @@ from vellum.workflows.nodes.displayable.code_execution_node import CodeExecution
 from vellum.workflows.nodes.displayable.code_execution_node.utils import read_file_from_path
 from vellum.workflows.types.core import JsonObject
 from vellum.workflows.utils.vellum_variables import primitive_type_to_vellum_variable_type
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -14,7 +14,7 @@ from vellum_ee.workflows.display.types import WorkflowDisplayContext
 _CodeExecutionNodeType = TypeVar("_CodeExecutionNodeType", bound=CodeExecutionNode)
 
 
-class BaseCodeExecutionNodeDisplay(
+class BaseCodeExecutionNodeDisplay(BaseNodeDisplay[_CodeExecutionNodeType], Generic[_CodeExecutionNodeType]):
     output_id: ClassVar[Optional[UUID]] = None
     log_output_id: ClassVar[Optional[UUID]] = None
 
vellum_ee/workflows/display/nodes/vellum/conditional_node.py
CHANGED
@@ -16,7 +16,7 @@ from vellum.workflows.expressions.or_ import OrExpression
 from vellum.workflows.nodes.displayable import ConditionalNode
 from vellum.workflows.types.core import ConditionType, JsonObject
 from vellum.workflows.utils.uuids import uuid4_from_hash
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
 from vellum_ee.workflows.display.utils.vellum import convert_descriptor_to_operator
@@ -40,7 +40,7 @@ class ConditionId:
     rule_group_id: Optional[str]
 
 
-class BaseConditionalNodeDisplay(
+class BaseConditionalNodeDisplay(BaseNodeDisplay[_ConditionalNodeType], Generic[_ConditionalNodeType]):
     source_handle_ids: ClassVar[Dict[int, UUID]]
     rule_ids: ClassVar[List[RuleIdMap]]
     condition_ids: ClassVar[list[ConditionId]]
vellum_ee/workflows/display/nodes/vellum/error_node.py
CHANGED
@@ -3,7 +3,7 @@ from typing import ClassVar, Generic, Optional, TypeVar
 
 from vellum.workflows.nodes import ErrorNode
 from vellum.workflows.types.core import JsonObject
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -11,7 +11,7 @@ from vellum_ee.workflows.display.types import WorkflowDisplayContext
 _ErrorNodeType = TypeVar("_ErrorNodeType", bound=ErrorNode)
 
 
-class BaseErrorNodeDisplay(
+class BaseErrorNodeDisplay(BaseNodeDisplay[_ErrorNodeType], Generic[_ErrorNodeType]):
     error_output_id: ClassVar[Optional[UUID]] = None
 
     name: ClassVar[str] = "error-node"
vellum_ee/workflows/display/nodes/vellum/final_output_node.py
CHANGED
@@ -4,7 +4,7 @@ from typing import Any, ClassVar, Generic, Optional, TypeVar
 from vellum.workflows.nodes.displayable.final_output_node import FinalOutputNode
 from vellum.workflows.types.core import JsonObject
 from vellum.workflows.utils.uuids import uuid4_from_hash
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import to_kebab_case
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -13,7 +13,7 @@ from vellum_ee.workflows.display.utils.vellum import infer_vellum_variable_type
 _FinalOutputNodeType = TypeVar("_FinalOutputNodeType", bound=FinalOutputNode)
 
 
-class BaseFinalOutputNodeDisplay(
+class BaseFinalOutputNodeDisplay(BaseNodeDisplay[_FinalOutputNodeType], Generic[_FinalOutputNodeType]):
     output_id: ClassVar[Optional[UUID]] = None
     output_name: ClassVar[Optional[str]] = None
     node_input_id: ClassVar[Optional[UUID]] = None
vellum_ee/workflows/display/nodes/vellum/guardrail_node.py
CHANGED
@@ -3,7 +3,7 @@ from typing import Generic, Optional, TypeVar
 
 from vellum.workflows.nodes import GuardrailNode
 from vellum.workflows.types.core import JsonObject
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -11,7 +11,7 @@ from vellum_ee.workflows.display.types import WorkflowDisplayContext
 _GuardrailNodeType = TypeVar("_GuardrailNodeType", bound=GuardrailNode)
 
 
-class BaseGuardrailNodeDisplay(
+class BaseGuardrailNodeDisplay(BaseNodeDisplay[_GuardrailNodeType], Generic[_GuardrailNodeType]):
     def serialize(
         self, display_context: WorkflowDisplayContext, error_output_id: Optional[UUID] = None, **kwargs
     ) -> JsonObject:
vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py
CHANGED
@@ -6,7 +6,7 @@ from vellum.workflows.nodes import InlinePromptNode
 from vellum.workflows.types.core import JsonObject
 from vellum.workflows.utils.functions import compile_function_definition
 from vellum.workflows.utils.uuids import uuid4_from_hash
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -16,7 +16,7 @@ from vellum_ee.workflows.display.vellum import NodeInput
 _InlinePromptNodeType = TypeVar("_InlinePromptNodeType", bound=InlinePromptNode)
 
 
-class BaseInlinePromptNodeDisplay(
+class BaseInlinePromptNodeDisplay(BaseNodeDisplay[_InlinePromptNodeType], Generic[_InlinePromptNodeType]):
     def serialize(
         self, display_context: WorkflowDisplayContext, error_output_id: Optional[UUID] = None, **kwargs
     ) -> JsonObject:
vellum_ee/workflows/display/nodes/vellum/inline_subworkflow_node.py
CHANGED
@@ -5,7 +5,7 @@ from vellum import VellumVariable
 from vellum.workflows.inputs.base import BaseInputs
 from vellum.workflows.nodes import InlineSubworkflowNode
 from vellum.workflows.types.core import JsonObject
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -17,7 +17,7 @@ _InlineSubworkflowNodeType = TypeVar("_InlineSubworkflowNodeType", bound=InlineS
 
 
 class BaseInlineSubworkflowNodeDisplay(
-
+    BaseNodeDisplay[_InlineSubworkflowNodeType], Generic[_InlineSubworkflowNodeType]
 ):
     workflow_input_ids_by_name: ClassVar[Dict[str, UUID]] = {}
 
vellum_ee/workflows/display/nodes/vellum/merge_node.py
CHANGED
@@ -4,13 +4,13 @@ from typing import Any, ClassVar, Generic, List, Optional, TypeVar
 from vellum.workflows.nodes.displayable import MergeNode
 from vellum.workflows.types.core import JsonObject
 from vellum.workflows.utils.uuids import uuid4_from_hash
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
 
 _MergeNodeType = TypeVar("_MergeNodeType", bound=MergeNode)
 
 
-class BaseMergeNodeDisplay(
+class BaseMergeNodeDisplay(BaseNodeDisplay[_MergeNodeType], Generic[_MergeNodeType]):
     target_handle_ids: ClassVar[List[UUID]]
 
     def __init__(self):
vellum_ee/workflows/display/nodes/vellum/note_node.py
CHANGED
@@ -2,13 +2,13 @@ from typing import Any, ClassVar, Dict, Generic, TypeVar, Union
 
 from vellum.workflows.nodes import NoteNode
 from vellum.workflows.types.core import JsonObject
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
 
 _NoteNodeType = TypeVar("_NoteNodeType", bound=NoteNode)
 
 
-class BaseNoteNodeDisplay(
+class BaseNoteNodeDisplay(BaseNodeDisplay[_NoteNodeType], Generic[_NoteNodeType]):
     text: ClassVar[str] = ""
     style: ClassVar[Union[Dict[str, Any], None]] = None
 
vellum_ee/workflows/display/nodes/vellum/prompt_deployment_node.py
CHANGED
@@ -5,7 +5,7 @@ from vellum.workflows.nodes.displayable.prompt_deployment_node import PromptDepl
 from vellum.workflows.references import OutputReference
 from vellum.workflows.types.core import JsonObject
 from vellum.workflows.vellum_client import create_vellum_client
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -13,9 +13,7 @@ from vellum_ee.workflows.display.types import WorkflowDisplayContext
 _PromptDeploymentNodeType = TypeVar("_PromptDeploymentNodeType", bound=PromptDeploymentNode)
 
 
-class BasePromptDeploymentNodeDisplay(
-    BaseNodeVellumDisplay[_PromptDeploymentNodeType], Generic[_PromptDeploymentNodeType]
-):
+class BasePromptDeploymentNodeDisplay(BaseNodeDisplay[_PromptDeploymentNodeType], Generic[_PromptDeploymentNodeType]):
     def serialize(
         self, display_context: WorkflowDisplayContext, error_output_id: Optional[UUID] = None, **kwargs
     ) -> JsonObject:
vellum_ee/workflows/display/nodes/vellum/search_node.py
CHANGED
@@ -11,7 +11,7 @@ from vellum.workflows.nodes.displayable.search_node import SearchNode
 from vellum.workflows.references import OutputReference
 from vellum.workflows.types.core import JsonArray, JsonObject
 from vellum.workflows.utils.uuids import uuid4_from_hash
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -28,7 +28,7 @@ class VariableIdMap:
     rhs: Optional["VariableIdMap"]
 
 
-class BaseSearchNodeDisplay(
+class BaseSearchNodeDisplay(BaseNodeDisplay[_SearchNodeType], Generic[_SearchNodeType]):
     # A mapping between the id of the operand (e.g. "lhs_variable_id" or "rhs_variable_id") and the id of the node input
     # that the operand is pointing to.
     metadata_filter_input_id_by_operand_id: Dict[UUID, UUID] = {}
vellum_ee/workflows/display/nodes/vellum/subworkflow_deployment_node.py
CHANGED
@@ -4,7 +4,7 @@ from typing import Generic, Optional, TypeVar
 from vellum.workflows.nodes import SubworkflowDeploymentNode
 from vellum.workflows.types.core import JsonObject
 from vellum.workflows.vellum_client import create_vellum_client
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -13,7 +13,7 @@ _SubworkflowDeploymentNodeType = TypeVar("_SubworkflowDeploymentNodeType", bound
 
 
 class BaseSubworkflowDeploymentNodeDisplay(
-
+    BaseNodeDisplay[_SubworkflowDeploymentNodeType], Generic[_SubworkflowDeploymentNodeType]
 ):
 
     def serialize(
vellum_ee/workflows/display/nodes/vellum/templating_node.py
CHANGED
@@ -4,7 +4,7 @@ from typing import Generic, Optional, TypeVar
 from vellum.workflows.nodes.core.templating_node import TemplatingNode
 from vellum.workflows.types.core import JsonObject
 from vellum.workflows.utils.vellum_variables import primitive_type_to_vellum_variable_type
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -14,7 +14,7 @@ _TemplatingNodeType = TypeVar("_TemplatingNodeType", bound=TemplatingNode)
 TEMPLATE_INPUT_NAME = TemplatingNode.template.name
 
 
-class BaseTemplatingNodeDisplay(
+class BaseTemplatingNodeDisplay(BaseNodeDisplay[_TemplatingNodeType], Generic[_TemplatingNodeType]):
     def serialize(
         self, display_context: WorkflowDisplayContext, error_output_id: Optional[UUID] = None, **kwargs
     ) -> JsonObject:
vellum_ee/workflows/display/nodes/vellum/tests/test_utils.py
CHANGED
@@ -9,7 +9,7 @@ from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.outputs import BaseOutputs
 from vellum.workflows.references import LazyReference
 from vellum_ee.workflows.display.editor.types import NodeDisplayData
-from vellum_ee.workflows.display.nodes.
+from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
 from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay
 from vellum_ee.workflows.display.nodes.vellum.utils import create_node_input_value_pointer_rules
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
@@ -36,7 +36,7 @@ class MyNodeA(BaseNode):
         output: str
 
 
-class MyNodeADisplay(
+class MyNodeADisplay(BaseNodeDisplay[MyNodeA]):
     pass
 
 
vellum_ee/workflows/display/workflows/base_workflow_display.py
CHANGED
@@ -31,7 +31,6 @@ from vellum_ee.workflows.display.base import (
 )
 from vellum_ee.workflows.display.editor.types import NodeDisplayData
 from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
-from vellum_ee.workflows.display.nodes.base_node_vellum_display import BaseNodeVellumDisplay
 from vellum_ee.workflows.display.nodes.get_node_display_class import get_node_display_class
 from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay, PortDisplay
 from vellum_ee.workflows.display.nodes.utils import raise_if_descriptor
@@ -241,14 +240,9 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
             )
 
             if source_node_display:
-
-
-
-                )
-                else:
-                    source_handle_id = source_node_display.get_node_port_display(
-                        source_node_display._node.Ports.default
-                    ).id
+                source_handle_id = source_node_display.get_source_handle_id(
+                    port_displays=self.display_context.port_displays
+                )
 
             synthetic_output_edges.append(
                 {
@@ -617,9 +611,7 @@ class BaseWorkflowDisplay(Generic[WorkflowType]):
         node_event_displays = {}
         for node_id in node_displays:
            node, current_node_display = node_displays[node_id]
-            input_display =
-            if isinstance(current_node_display, BaseNodeVellumDisplay):
-                input_display = current_node_display.node_input_ids_by_name
+            input_display = current_node_display.node_input_ids_by_name
             output_display = {
                 output.name: current_node_display.output_display[output].id
                 for output in current_node_display.output_display
vellum_ee/workflows/display/nodes/base_node_vellum_display.py
DELETED
@@ -1,40 +0,0 @@
-from uuid import UUID
-from typing import ClassVar, Dict, Optional
-
-from vellum.workflows.nodes.utils import get_unadorned_node
-from vellum.workflows.ports import Port
-from vellum.workflows.types.generics import NodeType
-from vellum_ee.workflows.display.editor.types import NodeDisplayComment, NodeDisplayData
-from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
-from vellum_ee.workflows.display.nodes.types import PortDisplay
-
-
-class BaseNodeVellumDisplay(BaseNodeDisplay[NodeType]):
-    # Used to explicitly set display data for a node
-    display_data: ClassVar[Optional[NodeDisplayData]] = None
-
-    def get_display_data(self) -> NodeDisplayData:
-        explicit_value = self._get_explicit_node_display_attr("display_data", NodeDisplayData)
-        docstring = self._node.__doc__
-
-        if explicit_value and explicit_value.comment and docstring:
-            comment = (
-                NodeDisplayComment(value=docstring, expanded=explicit_value.comment.expanded)
-                if explicit_value.comment.expanded
-                else NodeDisplayComment(value=docstring)
-            )
-            return NodeDisplayData(
-                position=explicit_value.position,
-                width=explicit_value.width,
-                height=explicit_value.height,
-                comment=comment,
-            )
-
-        return explicit_value if explicit_value else NodeDisplayData()
-
-    def get_source_handle_id(self, port_displays: Dict[Port, PortDisplay]) -> UUID:
-        unadorned_node = get_unadorned_node(self._node)
-        default_port = unadorned_node.Ports.default
-
-        default_port_display = port_displays[default_port]
-        return default_port_display.id
{vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/LICENSE
File without changes
{vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/WHEEL
File without changes
{vellum_ai-0.14.40.dist-info → vellum_ai-0.14.41.dist-info}/entry_points.txt
File without changes