vellum-ai 0.6.1__py3-none-any.whl → 0.6.3__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- vellum/__init__.py +14 -0
- vellum/client.py +3 -0
- vellum/core/client_wrapper.py +1 -1
- vellum/resources/__init__.py +2 -0
- vellum/resources/deployments/client.py +249 -0
- vellum/resources/sandboxes/client.py +167 -0
- vellum/resources/workflow_deployments/client.py +252 -0
- vellum/resources/workflow_sandboxes/__init__.py +2 -0
- vellum/resources/workflow_sandboxes/client.py +192 -0
- vellum/types/__init__.py +12 -0
- vellum/types/deployment_release_tag_deployment_history_item.py +26 -0
- vellum/types/deployment_release_tag_read.py +43 -0
- vellum/types/release_tag_source.py +5 -0
- vellum/types/subworkflow_node_result.py +3 -0
- vellum/types/subworkflow_node_result_data.py +25 -0
- vellum/types/workflow_release_tag_read.py +43 -0
- vellum/types/workflow_release_tag_workflow_deployment_history_item.py +33 -0
- {vellum_ai-0.6.1.dist-info → vellum_ai-0.6.3.dist-info}/METADATA +1 -1
- {vellum_ai-0.6.1.dist-info → vellum_ai-0.6.3.dist-info}/RECORD +21 -13
- {vellum_ai-0.6.1.dist-info → vellum_ai-0.6.3.dist-info}/LICENSE +0 -0
- {vellum_ai-0.6.1.dist-info → vellum_ai-0.6.3.dist-info}/WHEEL +0 -0
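Taken together, the changes below add Release Tag management to the Workflow Deployments client and a deploy endpoint to a new Workflow Sandboxes client. As orientation, here is a minimal usage sketch assembled from the method signatures and docstring examples in this diff; the API key, IDs, and tag names are placeholders:

from vellum.client import Vellum

client = Vellum(
    api_key="YOUR_API_KEY",  # placeholder credential
)

# New in 0.6.3: look up a Release Tag on a Workflow Deployment by name.
tag = client.workflow_deployments.retrieve_workflow_release_tag(
    id="workflow-deployment-uuid",  # placeholder deployment ID
    name="production",              # placeholder tag name
)

# New in 0.6.3: repoint an existing Release Tag at a specific history item.
client.workflow_deployments.update_workflow_release_tag(
    id="workflow-deployment-uuid",
    name="production",
    history_item_id="history-item-uuid",  # placeholder history item ID
)

# New in 0.6.3: deploy a Workflow straight from a Workflow Sandbox.
deployment = client.workflow_sandboxes.deploy_workflow(
    id="workflow-sandbox-uuid",   # placeholder sandbox ID
    workflow_id="workflow-uuid",  # placeholder workflow ID
)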
vellum/resources/workflow_deployments/client.py
CHANGED
@@ -12,8 +12,12 @@ from ...core.remove_none_from_dict import remove_none_from_dict
 from ...core.request_options import RequestOptions
 from ...types.paginated_slim_workflow_deployment_list import PaginatedSlimWorkflowDeploymentList
 from ...types.workflow_deployment_read import WorkflowDeploymentRead
+from ...types.workflow_release_tag_read import WorkflowReleaseTagRead
 from .types.workflow_deployments_list_request_status import WorkflowDeploymentsListRequestStatus
 
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
 
 class WorkflowDeploymentsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
@@ -135,6 +139,130 @@ class WorkflowDeploymentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    def retrieve_workflow_release_tag(
+        self, id: str, name: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> WorkflowReleaseTagRead:
+        """
+        Retrieve a Workflow Release Tag by tag name, associated with a specified Workflow Deployment.
+
+        Parameters:
+            - id: str. A UUID string identifying this workflow deployment.
+
+            - name: str. The name of the Release Tag associated with this Workflow Deployment that you'd like to retrieve.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from vellum.client import Vellum
+
+        client = Vellum(
+            api_key="YOUR_API_KEY",
+        )
+        client.workflow_deployments.retrieve_workflow_release_tag(
+            id="id",
+            name="name",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            method="GET",
+            url=urllib.parse.urljoin(
+                f"{self._client_wrapper.get_environment().default}/",
+                f"v1/workflow-deployments/{jsonable_encoder(id)}/release-tags/{jsonable_encoder(name)}",
+            ),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(WorkflowReleaseTagRead, _response.json())  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def update_workflow_release_tag(
+        self,
+        id: str,
+        name: str,
+        *,
+        history_item_id: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> WorkflowReleaseTagRead:
+        """
+        Updates an existing Release Tag associated with the specified Workflow Deployment.
+
+        Parameters:
+            - id: str. A UUID string identifying this workflow deployment.
+
+            - name: str. The name of the Release Tag associated with this Workflow Deployment that you'd like to update.
+
+            - history_item_id: typing.Optional[str]. The ID of the Workflow Deployment History Item to tag
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from vellum.client import Vellum
+
+        client = Vellum(
+            api_key="YOUR_API_KEY",
+        )
+        client.workflow_deployments.update_workflow_release_tag(
+            id="id",
+            name="name",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if history_item_id is not OMIT:
+            _request["history_item_id"] = history_item_id
+        _response = self._client_wrapper.httpx_client.request(
+            method="PATCH",
+            url=urllib.parse.urljoin(
+                f"{self._client_wrapper.get_environment().default}/",
+                f"v1/workflow-deployments/{jsonable_encoder(id)}/release-tags/{jsonable_encoder(name)}",
+            ),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(WorkflowReleaseTagRead, _response.json())  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
 
 class AsyncWorkflowDeploymentsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -257,3 +385,127 @@ class AsyncWorkflowDeploymentsClient:
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def retrieve_workflow_release_tag(
+        self, id: str, name: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> WorkflowReleaseTagRead:
+        """
+        Retrieve a Workflow Release Tag by tag name, associated with a specified Workflow Deployment.
+
+        Parameters:
+            - id: str. A UUID string identifying this workflow deployment.
+
+            - name: str. The name of the Release Tag associated with this Workflow Deployment that you'd like to retrieve.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from vellum.client import AsyncVellum
+
+        client = AsyncVellum(
+            api_key="YOUR_API_KEY",
+        )
+        await client.workflow_deployments.retrieve_workflow_release_tag(
+            id="id",
+            name="name",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            method="GET",
+            url=urllib.parse.urljoin(
+                f"{self._client_wrapper.get_environment().default}/",
+                f"v1/workflow-deployments/{jsonable_encoder(id)}/release-tags/{jsonable_encoder(name)}",
+            ),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(WorkflowReleaseTagRead, _response.json())  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def update_workflow_release_tag(
+        self,
+        id: str,
+        name: str,
+        *,
+        history_item_id: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> WorkflowReleaseTagRead:
+        """
+        Updates an existing Release Tag associated with the specified Workflow Deployment.
+
+        Parameters:
+            - id: str. A UUID string identifying this workflow deployment.
+
+            - name: str. The name of the Release Tag associated with this Workflow Deployment that you'd like to update.
+
+            - history_item_id: typing.Optional[str]. The ID of the Workflow Deployment History Item to tag
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from vellum.client import AsyncVellum
+
+        client = AsyncVellum(
+            api_key="YOUR_API_KEY",
+        )
+        await client.workflow_deployments.update_workflow_release_tag(
+            id="id",
+            name="name",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if history_item_id is not OMIT:
+            _request["history_item_id"] = history_item_id
+        _response = await self._client_wrapper.httpx_client.request(
+            method="PATCH",
+            url=urllib.parse.urljoin(
+                f"{self._client_wrapper.get_environment().default}/",
+                f"v1/workflow-deployments/{jsonable_encoder(id)}/release-tags/{jsonable_encoder(name)}",
+            ),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(WorkflowReleaseTagRead, _response.json())  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
vellum/resources/workflow_sandboxes/client.py
ADDED
@@ -0,0 +1,192 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+import urllib.parse
+from json.decoder import JSONDecodeError
+
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.pydantic_utilities import pydantic_v1
+from ...core.remove_none_from_dict import remove_none_from_dict
+from ...core.request_options import RequestOptions
+from ...types.workflow_deployment_read import WorkflowDeploymentRead
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class WorkflowSandboxesClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def deploy_workflow(
+        self,
+        id: str,
+        workflow_id: str,
+        *,
+        workflow_deployment_id: typing.Optional[str] = OMIT,
+        workflow_deployment_name: typing.Optional[str] = OMIT,
+        label: typing.Optional[str] = OMIT,
+        release_tags: typing.Optional[typing.Sequence[str]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> WorkflowDeploymentRead:
+        """
+        Parameters:
+            - id: str. A UUID string identifying this workflow sandbox.
+
+            - workflow_id: str. An ID identifying the Workflow you'd like to deploy.
+
+            - workflow_deployment_id: typing.Optional[str]. The Vellum-generated ID of the Workflow Deployment you'd like to update. Cannot specify both this and workflow_deployment_name. Leave null to create a new Workflow Deployment.
+
+            - workflow_deployment_name: typing.Optional[str]. The unique name of the Workflow Deployment you'd like to either create or update. Cannot specify both this and workflow_deployment_id. If provided and matches an existing Workflow Deployment, that Workflow Deployment will be updated. Otherwise, a new Prompt Deployment will be created.
+
+            - label: typing.Optional[str]. In the event that a new Workflow Deployment is created, this will be the label it's given.
+
+            - release_tags: typing.Optional[typing.Sequence[str]]. Optionally provide the release tags that you'd like to be associated with the latest release of the created/updated Prompt Deployment.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from vellum.client import Vellum
+
+        client = Vellum(
+            api_key="YOUR_API_KEY",
+        )
+        client.workflow_sandboxes.deploy_workflow(
+            id="id",
+            workflow_id="workflow_id",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if workflow_deployment_id is not OMIT:
+            _request["workflow_deployment_id"] = workflow_deployment_id
+        if workflow_deployment_name is not OMIT:
+            _request["workflow_deployment_name"] = workflow_deployment_name
+        if label is not OMIT:
+            _request["label"] = label
+        if release_tags is not OMIT:
+            _request["release_tags"] = release_tags
+        _response = self._client_wrapper.httpx_client.request(
+            method="POST",
+            url=urllib.parse.urljoin(
+                f"{self._client_wrapper.get_environment().default}/",
+                f"v1/workflow-sandboxes/{jsonable_encoder(id)}/workflows/{jsonable_encoder(workflow_id)}/deploy",
+            ),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(WorkflowDeploymentRead, _response.json())  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncWorkflowSandboxesClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def deploy_workflow(
+        self,
+        id: str,
+        workflow_id: str,
+        *,
+        workflow_deployment_id: typing.Optional[str] = OMIT,
+        workflow_deployment_name: typing.Optional[str] = OMIT,
+        label: typing.Optional[str] = OMIT,
+        release_tags: typing.Optional[typing.Sequence[str]] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> WorkflowDeploymentRead:
+        """
+        Parameters:
+            - id: str. A UUID string identifying this workflow sandbox.
+
+            - workflow_id: str. An ID identifying the Workflow you'd like to deploy.
+
+            - workflow_deployment_id: typing.Optional[str]. The Vellum-generated ID of the Workflow Deployment you'd like to update. Cannot specify both this and workflow_deployment_name. Leave null to create a new Workflow Deployment.
+
+            - workflow_deployment_name: typing.Optional[str]. The unique name of the Workflow Deployment you'd like to either create or update. Cannot specify both this and workflow_deployment_id. If provided and matches an existing Workflow Deployment, that Workflow Deployment will be updated. Otherwise, a new Prompt Deployment will be created.
+
+            - label: typing.Optional[str]. In the event that a new Workflow Deployment is created, this will be the label it's given.
+
+            - release_tags: typing.Optional[typing.Sequence[str]]. Optionally provide the release tags that you'd like to be associated with the latest release of the created/updated Prompt Deployment.
+
+            - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
+        ---
+        from vellum.client import AsyncVellum
+
+        client = AsyncVellum(
+            api_key="YOUR_API_KEY",
+        )
+        await client.workflow_sandboxes.deploy_workflow(
+            id="id",
+            workflow_id="workflow_id",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if workflow_deployment_id is not OMIT:
+            _request["workflow_deployment_id"] = workflow_deployment_id
+        if workflow_deployment_name is not OMIT:
+            _request["workflow_deployment_name"] = workflow_deployment_name
+        if label is not OMIT:
+            _request["label"] = label
+        if release_tags is not OMIT:
+            _request["release_tags"] = release_tags
+        _response = await self._client_wrapper.httpx_client.request(
+            method="POST",
+            url=urllib.parse.urljoin(
+                f"{self._client_wrapper.get_environment().default}/",
+                f"v1/workflow-sandboxes/{jsonable_encoder(id)}/workflows/{jsonable_encoder(workflow_id)}/deploy",
+            ),
+            params=jsonable_encoder(
+                request_options.get("additional_query_parameters") if request_options is not None else None
+            ),
+            json=jsonable_encoder(_request)
+            if request_options is None or request_options.get("additional_body_parameters") is None
+            else {
+                **jsonable_encoder(_request),
+                **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))),
+            },
+            headers=jsonable_encoder(
+                remove_none_from_dict(
+                    {
+                        **self._client_wrapper.get_headers(),
+                        **(request_options.get("additional_headers", {}) if request_options is not None else {}),
+                    }
+                )
+            ),
+            timeout=request_options.get("timeout_in_seconds")
+            if request_options is not None and request_options.get("timeout_in_seconds") is not None
+            else self._client_wrapper.get_timeout(),
+            retries=0,
+            max_retries=request_options.get("max_retries") if request_options is not None else 0,  # type: ignore
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic_v1.parse_obj_as(WorkflowDeploymentRead, _response.json())  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
vellum/types/__init__.py
CHANGED
@@ -94,6 +94,8 @@ from .conditional_node_result import ConditionalNodeResult
 from .conditional_node_result_data import ConditionalNodeResultData
 from .deployment_provider_payload_response import DeploymentProviderPayloadResponse
 from .deployment_read import DeploymentRead
+from .deployment_release_tag_deployment_history_item import DeploymentReleaseTagDeploymentHistoryItem
+from .deployment_release_tag_read import DeploymentReleaseTagRead
 from .document_document_to_document_index import DocumentDocumentToDocumentIndex
 from .document_index_chunking import (
     DocumentIndexChunking,
@@ -365,6 +367,7 @@ from .rejected_execute_prompt_response import RejectedExecutePromptResponse
 from .rejected_execute_workflow_workflow_result_event import RejectedExecuteWorkflowWorkflowResultEvent
 from .rejected_prompt_execution_meta import RejectedPromptExecutionMeta
 from .rejected_workflow_node_result_event import RejectedWorkflowNodeResultEvent
+from .release_tag_source import ReleaseTagSource
 from .sandbox_scenario import SandboxScenario
 from .scenario_input import ScenarioInput, ScenarioInput_ChatHistory, ScenarioInput_String
 from .scenario_input_chat_history_variable_value import ScenarioInputChatHistoryVariableValue
@@ -412,6 +415,7 @@ from .submit_workflow_execution_actual_request import (
 )
 from .subworkflow_enum import SubworkflowEnum
 from .subworkflow_node_result import SubworkflowNodeResult
+from .subworkflow_node_result_data import SubworkflowNodeResultData
 from .templating_node_array_result import TemplatingNodeArrayResult
 from .templating_node_chat_history_result import TemplatingNodeChatHistoryResult
 from .templating_node_error_result import TemplatingNodeErrorResult
@@ -608,6 +612,8 @@ from .workflow_output_json import WorkflowOutputJson
 from .workflow_output_number import WorkflowOutputNumber
 from .workflow_output_search_results import WorkflowOutputSearchResults
 from .workflow_output_string import WorkflowOutputString
+from .workflow_release_tag_read import WorkflowReleaseTagRead
+from .workflow_release_tag_workflow_deployment_history_item import WorkflowReleaseTagWorkflowDeploymentHistoryItem
 from .workflow_request_chat_history_input_request import WorkflowRequestChatHistoryInputRequest
 from .workflow_request_input_request import (
     WorkflowRequestInputRequest,
@@ -714,6 +720,8 @@ __all__ = [
     "ConditionalNodeResultData",
     "DeploymentProviderPayloadResponse",
     "DeploymentRead",
+    "DeploymentReleaseTagDeploymentHistoryItem",
+    "DeploymentReleaseTagRead",
     "DocumentDocumentToDocumentIndex",
     "DocumentIndexChunking",
     "DocumentIndexChunkingRequest",
@@ -955,6 +963,7 @@ __all__ = [
     "RejectedExecuteWorkflowWorkflowResultEvent",
     "RejectedPromptExecutionMeta",
     "RejectedWorkflowNodeResultEvent",
+    "ReleaseTagSource",
     "SandboxScenario",
     "ScenarioInput",
     "ScenarioInputChatHistoryVariableValue",
@@ -1002,6 +1011,7 @@ __all__ = [
     "SubmitWorkflowExecutionActualRequest_String",
     "SubworkflowEnum",
     "SubworkflowNodeResult",
+    "SubworkflowNodeResultData",
     "TemplatingNodeArrayResult",
     "TemplatingNodeChatHistoryResult",
     "TemplatingNodeErrorResult",
@@ -1172,6 +1182,8 @@ __all__ = [
     "WorkflowOutput_Number",
     "WorkflowOutput_SearchResults",
     "WorkflowOutput_String",
+    "WorkflowReleaseTagRead",
+    "WorkflowReleaseTagWorkflowDeploymentHistoryItem",
     "WorkflowRequestChatHistoryInputRequest",
     "WorkflowRequestInputRequest",
     "WorkflowRequestInputRequest_ChatHistory",
vellum/types/deployment_release_tag_deployment_history_item.py
ADDED
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class DeploymentReleaseTagDeploymentHistoryItem(pydantic_v1.BaseModel):
+    id: str
+    timestamp: dt.datetime
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/deployment_release_tag_read.py
ADDED
@@ -0,0 +1,43 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .deployment_release_tag_deployment_history_item import DeploymentReleaseTagDeploymentHistoryItem
+from .release_tag_source import ReleaseTagSource
+
+
+class DeploymentReleaseTagRead(pydantic_v1.BaseModel):
+    name: str = pydantic_v1.Field()
+    """
+    The name of the Release Tag
+    """
+
+    source: ReleaseTagSource = pydantic_v1.Field()
+    """
+    The source of how the Release Tag was originally created
+
+    - `SYSTEM` - System
+    - `USER` - User
+    """
+
+    history_item: DeploymentReleaseTagDeploymentHistoryItem = pydantic_v1.Field()
+    """
+    The Deployment History Item that this Release Tag is associated with
+    """
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/subworkflow_node_result.py
CHANGED
@@ -5,6 +5,7 @@ import typing
 
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from .subworkflow_node_result_data import SubworkflowNodeResultData
 
 
 class SubworkflowNodeResult(pydantic_v1.BaseModel):
@@ -12,6 +13,8 @@ class SubworkflowNodeResult(pydantic_v1.BaseModel):
     A Node Result Event emitted from a Subworkflow Node.
     """
 
+    data: typing.Optional[SubworkflowNodeResultData] = None
+
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
vellum/types/subworkflow_node_result_data.py
ADDED
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class SubworkflowNodeResultData(pydantic_v1.BaseModel):
+    execution_id: typing.Optional[str] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
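With the new optional data field, a Subworkflow Node result can now carry the execution ID of the sub-workflow run. A small sketch of how a consumer might read it, guarding both optional fields; the result object is assumed to come from a workflow execution event:

import typing

from vellum.types import SubworkflowNodeResult

def subworkflow_execution_id(result: SubworkflowNodeResult) -> typing.Optional[str]:
    # Both data and execution_id are optional, so guard before dereferencing.
    if result.data is not None:
        return result.data.execution_id
    return None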