prefect-client 3.1.4__py3-none-any.whl → 3.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/__init__.py +3 -0
- prefect/_internal/compatibility/migration.py +1 -1
- prefect/_internal/concurrency/api.py +52 -52
- prefect/_internal/concurrency/calls.py +59 -35
- prefect/_internal/concurrency/cancellation.py +34 -18
- prefect/_internal/concurrency/event_loop.py +7 -6
- prefect/_internal/concurrency/threads.py +41 -33
- prefect/_internal/concurrency/waiters.py +28 -21
- prefect/_internal/pydantic/v1_schema.py +2 -2
- prefect/_internal/pydantic/v2_schema.py +10 -9
- prefect/_internal/schemas/bases.py +10 -11
- prefect/_internal/schemas/validators.py +2 -1
- prefect/_version.py +3 -3
- prefect/automations.py +53 -47
- prefect/blocks/abstract.py +12 -10
- prefect/blocks/core.py +4 -2
- prefect/cache_policies.py +11 -11
- prefect/client/__init__.py +3 -1
- prefect/client/base.py +36 -37
- prefect/client/cloud.py +26 -19
- prefect/client/collections.py +2 -2
- prefect/client/orchestration.py +366 -277
- prefect/client/schemas/__init__.py +24 -0
- prefect/client/schemas/actions.py +132 -120
- prefect/client/schemas/filters.py +5 -0
- prefect/client/schemas/objects.py +113 -85
- prefect/client/schemas/responses.py +21 -18
- prefect/client/schemas/schedules.py +136 -93
- prefect/client/subscriptions.py +28 -14
- prefect/client/utilities.py +32 -36
- prefect/concurrency/asyncio.py +6 -9
- prefect/concurrency/services.py +3 -0
- prefect/concurrency/sync.py +35 -5
- prefect/context.py +39 -31
- prefect/deployments/flow_runs.py +3 -5
- prefect/docker/__init__.py +1 -1
- prefect/events/schemas/events.py +25 -20
- prefect/events/utilities.py +1 -2
- prefect/filesystems.py +3 -3
- prefect/flow_engine.py +755 -138
- prefect/flow_runs.py +3 -3
- prefect/flows.py +214 -170
- prefect/logging/configuration.py +1 -1
- prefect/logging/highlighters.py +1 -2
- prefect/logging/loggers.py +30 -20
- prefect/main.py +17 -24
- prefect/runner/runner.py +43 -21
- prefect/runner/server.py +30 -32
- prefect/runner/submit.py +3 -6
- prefect/runner/utils.py +6 -6
- prefect/runtime/flow_run.py +7 -0
- prefect/settings/constants.py +2 -2
- prefect/settings/legacy.py +1 -1
- prefect/settings/models/server/events.py +10 -0
- prefect/settings/sources.py +9 -2
- prefect/task_engine.py +72 -19
- prefect/task_runners.py +2 -2
- prefect/tasks.py +46 -33
- prefect/telemetry/bootstrap.py +15 -2
- prefect/telemetry/run_telemetry.py +107 -0
- prefect/transactions.py +14 -14
- prefect/types/__init__.py +20 -3
- prefect/utilities/_engine.py +96 -0
- prefect/utilities/annotations.py +25 -18
- prefect/utilities/asyncutils.py +126 -140
- prefect/utilities/callables.py +87 -78
- prefect/utilities/collections.py +278 -117
- prefect/utilities/compat.py +13 -21
- prefect/utilities/context.py +6 -5
- prefect/utilities/dispatch.py +23 -12
- prefect/utilities/dockerutils.py +33 -32
- prefect/utilities/engine.py +126 -239
- prefect/utilities/filesystem.py +18 -15
- prefect/utilities/hashing.py +10 -11
- prefect/utilities/importtools.py +40 -27
- prefect/utilities/math.py +9 -5
- prefect/utilities/names.py +3 -3
- prefect/utilities/processutils.py +121 -57
- prefect/utilities/pydantic.py +41 -36
- prefect/utilities/render_swagger.py +22 -12
- prefect/utilities/schema_tools/__init__.py +2 -1
- prefect/utilities/schema_tools/hydration.py +50 -43
- prefect/utilities/schema_tools/validation.py +52 -42
- prefect/utilities/services.py +13 -12
- prefect/utilities/templating.py +45 -45
- prefect/utilities/text.py +2 -1
- prefect/utilities/timeout.py +4 -4
- prefect/utilities/urls.py +9 -4
- prefect/utilities/visualization.py +46 -24
- prefect/variables.py +9 -8
- prefect/workers/base.py +18 -10
- {prefect_client-3.1.4.dist-info → prefect_client-3.1.6.dist-info}/METADATA +5 -5
- {prefect_client-3.1.4.dist-info → prefect_client-3.1.6.dist-info}/RECORD +96 -94
- {prefect_client-3.1.4.dist-info → prefect_client-3.1.6.dist-info}/WHEEL +1 -1
- {prefect_client-3.1.4.dist-info → prefect_client-3.1.6.dist-info}/LICENSE +0 -0
- {prefect_client-3.1.4.dist-info → prefect_client-3.1.6.dist-info}/top_level.txt +0 -0
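Most of the churn in this release lands in `prefect/client/orchestration.py`, reproduced in full below. As a quick orientation before the diff, here is a minimal, hedged sketch of how the re-typed `get_client()` overloads (keyword-only `httpx_settings` and `sync_client`) and the now `list[...]`-returning read methods are used; it is illustrative only, not code from the package, and assumes a reachable Prefect API or ephemeral server mode.

```python
# Minimal sketch (not from the package): exercising the get_client() overloads
# shown in the prefect/client/orchestration.py diff below.
import asyncio

from prefect.client.orchestration import get_client


async def count_flows_async() -> int:
    # sync_client defaults to False, so this returns an async PrefectClient.
    async with get_client() as client:
        flows = await client.read_flows(limit=10)  # typed as list[Flow] in 3.1.6
        return len(flows)


def count_flows_sync() -> int:
    # sync_client=True returns a SyncPrefectClient usable without an event loop.
    with get_client(sync_client=True) as client:
        return len(client.read_flows(limit=10))


if __name__ == "__main__":
    print(asyncio.run(count_flows_async()))
```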
prefect/client/orchestration.py
CHANGED
@@ -1,21 +1,11 @@
|
|
1
1
|
import asyncio
|
2
2
|
import datetime
|
3
|
+
import ssl
|
3
4
|
import warnings
|
5
|
+
from collections.abc import Iterable
|
4
6
|
from contextlib import AsyncExitStack
|
5
|
-
from
|
6
|
-
|
7
|
-
Any,
|
8
|
-
Dict,
|
9
|
-
Iterable,
|
10
|
-
List,
|
11
|
-
Literal,
|
12
|
-
Optional,
|
13
|
-
Set,
|
14
|
-
Tuple,
|
15
|
-
TypeVar,
|
16
|
-
Union,
|
17
|
-
overload,
|
18
|
-
)
|
7
|
+
from logging import Logger
|
8
|
+
from typing import TYPE_CHECKING, Any, Literal, NoReturn, Optional, Union, overload
|
19
9
|
from uuid import UUID, uuid4
|
20
10
|
|
21
11
|
import certifi
|
@@ -26,7 +16,7 @@ import pydantic
|
|
26
16
|
from asgi_lifespan import LifespanManager
|
27
17
|
from packaging import version
|
28
18
|
from starlette import status
|
29
|
-
from typing_extensions import ParamSpec
|
19
|
+
from typing_extensions import ParamSpec, Self, TypeVar
|
30
20
|
|
31
21
|
import prefect
|
32
22
|
import prefect.exceptions
|
@@ -137,6 +127,7 @@ from prefect.settings import (
|
|
137
127
|
PREFECT_TESTING_UNIT_TEST_MODE,
|
138
128
|
get_current_settings,
|
139
129
|
)
|
130
|
+
from prefect.types import KeyValueLabelsField
|
140
131
|
|
141
132
|
if TYPE_CHECKING:
|
142
133
|
from prefect.flows import Flow as FlowObject
|
@@ -151,26 +142,29 @@ from prefect.client.base import (
|
|
151
142
|
)
|
152
143
|
|
153
144
|
P = ParamSpec("P")
|
154
|
-
R = TypeVar("R")
|
145
|
+
R = TypeVar("R", infer_variance=True)
|
146
|
+
T = TypeVar("T")
|
155
147
|
|
156
148
|
|
157
149
|
@overload
|
158
150
|
def get_client(
|
159
|
-
|
151
|
+
*,
|
152
|
+
httpx_settings: Optional[dict[str, Any]] = ...,
|
153
|
+
sync_client: Literal[False] = False,
|
160
154
|
) -> "PrefectClient":
|
161
155
|
...
|
162
156
|
|
163
157
|
|
164
158
|
@overload
|
165
159
|
def get_client(
|
166
|
-
httpx_settings: Optional[
|
160
|
+
*, httpx_settings: Optional[dict[str, Any]] = ..., sync_client: Literal[True] = ...
|
167
161
|
) -> "SyncPrefectClient":
|
168
162
|
...
|
169
163
|
|
170
164
|
|
171
165
|
def get_client(
|
172
|
-
httpx_settings: Optional[
|
173
|
-
):
|
166
|
+
httpx_settings: Optional[dict[str, Any]] = None, sync_client: bool = False
|
167
|
+
) -> Union["SyncPrefectClient", "PrefectClient"]:
|
174
168
|
"""
|
175
169
|
Retrieve a HTTP client for communicating with the Prefect REST API.
|
176
170
|
|
@@ -199,18 +193,21 @@ def get_client(
|
|
199
193
|
|
200
194
|
if sync_client:
|
201
195
|
if client_ctx := prefect.context.SyncClientContext.get():
|
202
|
-
if
|
196
|
+
if (
|
197
|
+
client_ctx.client
|
198
|
+
and getattr(client_ctx, "_httpx_settings", None) == httpx_settings
|
199
|
+
):
|
203
200
|
return client_ctx.client
|
204
201
|
else:
|
205
202
|
if client_ctx := prefect.context.AsyncClientContext.get():
|
206
203
|
if (
|
207
204
|
client_ctx.client
|
208
|
-
and client_ctx
|
209
|
-
and loop in (client_ctx.client
|
205
|
+
and getattr(client_ctx, "_httpx_settings", None) == httpx_settings
|
206
|
+
and loop in (getattr(client_ctx.client, "_loop", None), None)
|
210
207
|
):
|
211
208
|
return client_ctx.client
|
212
209
|
|
213
|
-
api = PREFECT_API_URL.value()
|
210
|
+
api: str = PREFECT_API_URL.value()
|
214
211
|
server_type = None
|
215
212
|
|
216
213
|
if not api and PREFECT_SERVER_ALLOW_EPHEMERAL_MODE:
|
@@ -276,19 +273,25 @@ class PrefectClient:
|
|
276
273
|
*,
|
277
274
|
api_key: Optional[str] = None,
|
278
275
|
api_version: Optional[str] = None,
|
279
|
-
httpx_settings: Optional[
|
276
|
+
httpx_settings: Optional[dict[str, Any]] = None,
|
280
277
|
server_type: Optional[ServerType] = None,
|
281
278
|
) -> None:
|
282
279
|
httpx_settings = httpx_settings.copy() if httpx_settings else {}
|
283
280
|
httpx_settings.setdefault("headers", {})
|
284
281
|
|
285
282
|
if PREFECT_API_TLS_INSECURE_SKIP_VERIFY:
|
286
|
-
|
283
|
+
# Create an unverified context for insecure connections
|
284
|
+
ctx = ssl.create_default_context()
|
285
|
+
ctx.check_hostname = False
|
286
|
+
ctx.verify_mode = ssl.CERT_NONE
|
287
|
+
httpx_settings.setdefault("verify", ctx)
|
287
288
|
else:
|
288
289
|
cert_file = PREFECT_API_SSL_CERT_FILE.value()
|
289
290
|
if not cert_file:
|
290
291
|
cert_file = certifi.where()
|
291
|
-
|
292
|
+
# Create a verified context with the certificate file
|
293
|
+
ctx = ssl.create_default_context(cafile=cert_file)
|
294
|
+
httpx_settings.setdefault("verify", ctx)
|
292
295
|
|
293
296
|
if api_version is None:
|
294
297
|
api_version = SERVER_API_VERSION
|
@@ -350,7 +353,7 @@ class PrefectClient:
|
|
350
353
|
)
|
351
354
|
|
352
355
|
# Connect to an in-process application
|
353
|
-
|
356
|
+
else:
|
354
357
|
self._ephemeral_app = api
|
355
358
|
self.server_type = ServerType.EPHEMERAL
|
356
359
|
|
@@ -370,12 +373,6 @@ class PrefectClient:
|
|
370
373
|
)
|
371
374
|
httpx_settings.setdefault("base_url", "http://ephemeral-prefect/api")
|
372
375
|
|
373
|
-
else:
|
374
|
-
raise TypeError(
|
375
|
-
f"Unexpected type {type(api).__name__!r} for argument `api`. Expected"
|
376
|
-
" 'str' or 'ASGIApp/FastAPI'"
|
377
|
-
)
|
378
|
-
|
379
376
|
# See https://www.python-httpx.org/advanced/#timeout-configuration
|
380
377
|
httpx_settings.setdefault(
|
381
378
|
"timeout",
|
@@ -419,9 +416,9 @@ class PrefectClient:
|
|
419
416
|
if isinstance(server_transport, httpx.AsyncHTTPTransport):
|
420
417
|
pool = getattr(server_transport, "_pool", None)
|
421
418
|
if isinstance(pool, httpcore.AsyncConnectionPool):
|
422
|
-
pool
|
419
|
+
setattr(pool, "_retries", 3)
|
423
420
|
|
424
|
-
self.logger = get_logger("client")
|
421
|
+
self.logger: Logger = get_logger("client")
|
425
422
|
|
426
423
|
@property
|
427
424
|
def api_url(self) -> httpx.URL:
|
@@ -451,7 +448,7 @@ class PrefectClient:
|
|
451
448
|
"""
|
452
449
|
return await self._client.get("/hello")
|
453
450
|
|
454
|
-
async def create_flow(self, flow: "FlowObject") -> UUID:
|
451
|
+
async def create_flow(self, flow: "FlowObject[Any, Any]") -> UUID:
|
455
452
|
"""
|
456
453
|
Create a flow in the Prefect API.
|
457
454
|
|
@@ -504,19 +501,37 @@ class PrefectClient:
|
|
504
501
|
response = await self._client.get(f"/flows/{flow_id}")
|
505
502
|
return Flow.model_validate(response.json())
|
506
503
|
|
504
|
+
async def delete_flow(self, flow_id: UUID) -> None:
|
505
|
+
"""
|
506
|
+
Delete a flow by UUID.
|
507
|
+
|
508
|
+
Args:
|
509
|
+
flow_id: ID of the flow to be deleted
|
510
|
+
Raises:
|
511
|
+
prefect.exceptions.ObjectNotFound: If request returns 404
|
512
|
+
httpx.RequestError: If requests fail
|
513
|
+
"""
|
514
|
+
try:
|
515
|
+
await self._client.delete(f"/flows/{flow_id}")
|
516
|
+
except httpx.HTTPStatusError as e:
|
517
|
+
if e.response.status_code == status.HTTP_404_NOT_FOUND:
|
518
|
+
raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
|
519
|
+
else:
|
520
|
+
raise
|
521
|
+
|
507
522
|
async def read_flows(
|
508
523
|
self,
|
509
524
|
*,
|
510
|
-
flow_filter: FlowFilter = None,
|
511
|
-
flow_run_filter: FlowRunFilter = None,
|
512
|
-
task_run_filter: TaskRunFilter = None,
|
513
|
-
deployment_filter: DeploymentFilter = None,
|
514
|
-
work_pool_filter: WorkPoolFilter = None,
|
515
|
-
work_queue_filter: WorkQueueFilter = None,
|
516
|
-
sort: FlowSort = None,
|
525
|
+
flow_filter: Optional[FlowFilter] = None,
|
526
|
+
flow_run_filter: Optional[FlowRunFilter] = None,
|
527
|
+
task_run_filter: Optional[TaskRunFilter] = None,
|
528
|
+
deployment_filter: Optional[DeploymentFilter] = None,
|
529
|
+
work_pool_filter: Optional[WorkPoolFilter] = None,
|
530
|
+
work_queue_filter: Optional[WorkQueueFilter] = None,
|
531
|
+
sort: Optional[FlowSort] = None,
|
517
532
|
limit: Optional[int] = None,
|
518
533
|
offset: int = 0,
|
519
|
-
) ->
|
534
|
+
) -> list[Flow]:
|
520
535
|
"""
|
521
536
|
Query the Prefect API for flows. Only flows matching all criteria will
|
522
537
|
be returned.
|
@@ -535,7 +550,7 @@ class PrefectClient:
|
|
535
550
|
Returns:
|
536
551
|
a list of Flow model representations of the flows
|
537
552
|
"""
|
538
|
-
body = {
|
553
|
+
body: dict[str, Any] = {
|
539
554
|
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
|
540
555
|
"flow_runs": (
|
541
556
|
flow_run_filter.model_dump(mode="json", exclude_unset=True)
|
@@ -560,7 +575,7 @@ class PrefectClient:
|
|
560
575
|
}
|
561
576
|
|
562
577
|
response = await self._client.post("/flows/filter", json=body)
|
563
|
-
return pydantic.TypeAdapter(
|
578
|
+
return pydantic.TypeAdapter(list[Flow]).validate_python(response.json())
|
564
579
|
|
565
580
|
async def read_flow_by_name(
|
566
581
|
self,
|
@@ -582,15 +597,15 @@ class PrefectClient:
|
|
582
597
|
self,
|
583
598
|
deployment_id: UUID,
|
584
599
|
*,
|
585
|
-
parameters: Optional[
|
586
|
-
context: Optional[
|
587
|
-
state: Optional[prefect.states.State] = None,
|
600
|
+
parameters: Optional[dict[str, Any]] = None,
|
601
|
+
context: Optional[dict[str, Any]] = None,
|
602
|
+
state: Optional[prefect.states.State[Any]] = None,
|
588
603
|
name: Optional[str] = None,
|
589
604
|
tags: Optional[Iterable[str]] = None,
|
590
605
|
idempotency_key: Optional[str] = None,
|
591
606
|
parent_task_run_id: Optional[UUID] = None,
|
592
607
|
work_queue_name: Optional[str] = None,
|
593
|
-
job_variables: Optional[
|
608
|
+
job_variables: Optional[dict[str, Any]] = None,
|
594
609
|
) -> FlowRun:
|
595
610
|
"""
|
596
611
|
Create a flow run for a deployment.
|
@@ -631,7 +646,7 @@ class PrefectClient:
|
|
631
646
|
parameters=parameters,
|
632
647
|
context=context,
|
633
648
|
state=state.to_state_create(),
|
634
|
-
tags=tags,
|
649
|
+
tags=list(tags),
|
635
650
|
name=name,
|
636
651
|
idempotency_key=idempotency_key,
|
637
652
|
parent_task_run_id=parent_task_run_id,
|
@@ -650,13 +665,13 @@ class PrefectClient:
|
|
650
665
|
|
651
666
|
async def create_flow_run(
|
652
667
|
self,
|
653
|
-
flow: "FlowObject",
|
668
|
+
flow: "FlowObject[Any, R]",
|
654
669
|
name: Optional[str] = None,
|
655
|
-
parameters: Optional[
|
656
|
-
context: Optional[
|
670
|
+
parameters: Optional[dict[str, Any]] = None,
|
671
|
+
context: Optional[dict[str, Any]] = None,
|
657
672
|
tags: Optional[Iterable[str]] = None,
|
658
673
|
parent_task_run_id: Optional[UUID] = None,
|
659
|
-
state: Optional["prefect.states.State"] = None,
|
674
|
+
state: Optional["prefect.states.State[R]"] = None,
|
660
675
|
) -> FlowRun:
|
661
676
|
"""
|
662
677
|
Create a flow run for a flow.
|
@@ -698,7 +713,7 @@ class PrefectClient:
|
|
698
713
|
state=state.to_state_create(),
|
699
714
|
empirical_policy=FlowRunPolicy(
|
700
715
|
retries=flow.retries,
|
701
|
-
retry_delay=flow.retry_delay_seconds,
|
716
|
+
retry_delay=int(flow.retry_delay_seconds or 0),
|
702
717
|
),
|
703
718
|
)
|
704
719
|
|
@@ -716,12 +731,12 @@ class PrefectClient:
|
|
716
731
|
self,
|
717
732
|
flow_run_id: UUID,
|
718
733
|
flow_version: Optional[str] = None,
|
719
|
-
parameters: Optional[dict] = None,
|
734
|
+
parameters: Optional[dict[str, Any]] = None,
|
720
735
|
name: Optional[str] = None,
|
721
736
|
tags: Optional[Iterable[str]] = None,
|
722
737
|
empirical_policy: Optional[FlowRunPolicy] = None,
|
723
738
|
infrastructure_pid: Optional[str] = None,
|
724
|
-
job_variables: Optional[dict] = None,
|
739
|
+
job_variables: Optional[dict[str, Any]] = None,
|
725
740
|
) -> httpx.Response:
|
726
741
|
"""
|
727
742
|
Update a flow run's details.
|
@@ -742,7 +757,7 @@ class PrefectClient:
|
|
742
757
|
Returns:
|
743
758
|
an `httpx.Response` object from the PATCH request
|
744
759
|
"""
|
745
|
-
params = {}
|
760
|
+
params: dict[str, Any] = {}
|
746
761
|
if flow_version is not None:
|
747
762
|
params["flow_version"] = flow_version
|
748
763
|
if parameters is not None:
|
@@ -825,7 +840,7 @@ class PrefectClient:
|
|
825
840
|
async def read_concurrency_limit_by_tag(
|
826
841
|
self,
|
827
842
|
tag: str,
|
828
|
-
):
|
843
|
+
) -> ConcurrencyLimit:
|
829
844
|
"""
|
830
845
|
Read the concurrency limit set on a specific tag.
|
831
846
|
|
@@ -861,7 +876,7 @@ class PrefectClient:
|
|
861
876
|
self,
|
862
877
|
limit: int,
|
863
878
|
offset: int,
|
864
|
-
):
|
879
|
+
) -> list[ConcurrencyLimit]:
|
865
880
|
"""
|
866
881
|
Lists concurrency limits set on task run tags.
|
867
882
|
|
@@ -879,15 +894,15 @@ class PrefectClient:
|
|
879
894
|
}
|
880
895
|
|
881
896
|
response = await self._client.post("/concurrency_limits/filter", json=body)
|
882
|
-
return pydantic.TypeAdapter(
|
897
|
+
return pydantic.TypeAdapter(list[ConcurrencyLimit]).validate_python(
|
883
898
|
response.json()
|
884
899
|
)
|
885
900
|
|
886
901
|
async def reset_concurrency_limit_by_tag(
|
887
902
|
self,
|
888
903
|
tag: str,
|
889
|
-
slot_override: Optional[
|
890
|
-
):
|
904
|
+
slot_override: Optional[list[Union[UUID, str]]] = None,
|
905
|
+
) -> None:
|
891
906
|
"""
|
892
907
|
Resets the concurrency limit slots set on a specific tag.
|
893
908
|
|
@@ -920,7 +935,7 @@ class PrefectClient:
|
|
920
935
|
async def delete_concurrency_limit_by_tag(
|
921
936
|
self,
|
922
937
|
tag: str,
|
923
|
-
):
|
938
|
+
) -> None:
|
924
939
|
"""
|
925
940
|
Delete the concurrency limit set on a specific tag.
|
926
941
|
|
@@ -944,7 +959,7 @@ class PrefectClient:
|
|
944
959
|
|
945
960
|
async def increment_v1_concurrency_slots(
|
946
961
|
self,
|
947
|
-
names:
|
962
|
+
names: list[str],
|
948
963
|
task_run_id: UUID,
|
949
964
|
) -> httpx.Response:
|
950
965
|
"""
|
@@ -954,7 +969,7 @@ class PrefectClient:
|
|
954
969
|
names (List[str]): A list of limit names for which to increment limits.
|
955
970
|
task_run_id (UUID): The task run ID incrementing the limits.
|
956
971
|
"""
|
957
|
-
data = {
|
972
|
+
data: dict[str, Any] = {
|
958
973
|
"names": names,
|
959
974
|
"task_run_id": str(task_run_id),
|
960
975
|
}
|
@@ -966,7 +981,7 @@ class PrefectClient:
|
|
966
981
|
|
967
982
|
async def decrement_v1_concurrency_slots(
|
968
983
|
self,
|
969
|
-
names:
|
984
|
+
names: list[str],
|
970
985
|
task_run_id: UUID,
|
971
986
|
occupancy_seconds: float,
|
972
987
|
) -> httpx.Response:
|
@@ -982,7 +997,7 @@ class PrefectClient:
|
|
982
997
|
Returns:
|
983
998
|
httpx.Response: The HTTP response from the server.
|
984
999
|
"""
|
985
|
-
data = {
|
1000
|
+
data: dict[str, Any] = {
|
986
1001
|
"names": names,
|
987
1002
|
"task_run_id": str(task_run_id),
|
988
1003
|
"occupancy_seconds": occupancy_seconds,
|
@@ -1082,7 +1097,7 @@ class PrefectClient:
|
|
1082
1097
|
|
1083
1098
|
return WorkQueue.model_validate(response.json())
|
1084
1099
|
|
1085
|
-
async def update_work_queue(self, id: UUID, **kwargs):
|
1100
|
+
async def update_work_queue(self, id: UUID, **kwargs: Any) -> None:
|
1086
1101
|
"""
|
1087
1102
|
Update properties of a work queue.
|
1088
1103
|
|
@@ -1112,8 +1127,8 @@ class PrefectClient:
|
|
1112
1127
|
self,
|
1113
1128
|
id: UUID,
|
1114
1129
|
limit: int = 10,
|
1115
|
-
scheduled_before: datetime.datetime = None,
|
1116
|
-
) ->
|
1130
|
+
scheduled_before: Optional[datetime.datetime] = None,
|
1131
|
+
) -> list[FlowRun]:
|
1117
1132
|
"""
|
1118
1133
|
Read flow runs off a work queue.
|
1119
1134
|
|
@@ -1146,7 +1161,7 @@ class PrefectClient:
|
|
1146
1161
|
raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
|
1147
1162
|
else:
|
1148
1163
|
raise
|
1149
|
-
return pydantic.TypeAdapter(
|
1164
|
+
return pydantic.TypeAdapter(list[FlowRun]).validate_python(response.json())
|
1150
1165
|
|
1151
1166
|
async def read_work_queue(
|
1152
1167
|
self,
|
@@ -1202,9 +1217,9 @@ class PrefectClient:
|
|
1202
1217
|
|
1203
1218
|
async def match_work_queues(
|
1204
1219
|
self,
|
1205
|
-
prefixes:
|
1220
|
+
prefixes: list[str],
|
1206
1221
|
work_pool_name: Optional[str] = None,
|
1207
|
-
) ->
|
1222
|
+
) -> list[WorkQueue]:
|
1208
1223
|
"""
|
1209
1224
|
Query the Prefect API for work queues with names with a specific prefix.
|
1210
1225
|
|
@@ -1218,7 +1233,7 @@ class PrefectClient:
|
|
1218
1233
|
"""
|
1219
1234
|
page_length = 100
|
1220
1235
|
current_page = 0
|
1221
|
-
work_queues = []
|
1236
|
+
work_queues: list[WorkQueue] = []
|
1222
1237
|
|
1223
1238
|
while True:
|
1224
1239
|
new_queues = await self.read_work_queues(
|
@@ -1239,7 +1254,7 @@ class PrefectClient:
|
|
1239
1254
|
async def delete_work_queue_by_id(
|
1240
1255
|
self,
|
1241
1256
|
id: UUID,
|
1242
|
-
):
|
1257
|
+
) -> None:
|
1243
1258
|
"""
|
1244
1259
|
Delete a work queue by its ID.
|
1245
1260
|
|
@@ -1336,7 +1351,7 @@ class PrefectClient:
|
|
1336
1351
|
self,
|
1337
1352
|
block_document_id: UUID,
|
1338
1353
|
block_document: BlockDocumentUpdate,
|
1339
|
-
):
|
1354
|
+
) -> None:
|
1340
1355
|
"""
|
1341
1356
|
Update a block document in the Prefect API.
|
1342
1357
|
"""
|
@@ -1355,7 +1370,7 @@ class PrefectClient:
|
|
1355
1370
|
else:
|
1356
1371
|
raise
|
1357
1372
|
|
1358
|
-
async def delete_block_document(self, block_document_id: UUID):
|
1373
|
+
async def delete_block_document(self, block_document_id: UUID) -> None:
|
1359
1374
|
"""
|
1360
1375
|
Delete a block document.
|
1361
1376
|
"""
|
@@ -1398,7 +1413,9 @@ class PrefectClient:
|
|
1398
1413
|
raise
|
1399
1414
|
return BlockSchema.model_validate(response.json())
|
1400
1415
|
|
1401
|
-
async def update_block_type(
|
1416
|
+
async def update_block_type(
|
1417
|
+
self, block_type_id: UUID, block_type: BlockTypeUpdate
|
1418
|
+
) -> None:
|
1402
1419
|
"""
|
1403
1420
|
Update a block document in the Prefect API.
|
1404
1421
|
"""
|
@@ -1417,7 +1434,7 @@ class PrefectClient:
|
|
1417
1434
|
else:
|
1418
1435
|
raise
|
1419
1436
|
|
1420
|
-
async def delete_block_type(self, block_type_id: UUID):
|
1437
|
+
async def delete_block_type(self, block_type_id: UUID) -> None:
|
1421
1438
|
"""
|
1422
1439
|
Delete a block type.
|
1423
1440
|
"""
|
@@ -1437,7 +1454,7 @@ class PrefectClient:
|
|
1437
1454
|
else:
|
1438
1455
|
raise
|
1439
1456
|
|
1440
|
-
async def read_block_types(self) ->
|
1457
|
+
async def read_block_types(self) -> list[BlockType]:
|
1441
1458
|
"""
|
1442
1459
|
Read all block types
|
1443
1460
|
Raises:
|
@@ -1447,9 +1464,9 @@ class PrefectClient:
|
|
1447
1464
|
List of BlockTypes.
|
1448
1465
|
"""
|
1449
1466
|
response = await self._client.post("/block_types/filter", json={})
|
1450
|
-
return pydantic.TypeAdapter(
|
1467
|
+
return pydantic.TypeAdapter(list[BlockType]).validate_python(response.json())
|
1451
1468
|
|
1452
|
-
async def read_block_schemas(self) ->
|
1469
|
+
async def read_block_schemas(self) -> list[BlockSchema]:
|
1453
1470
|
"""
|
1454
1471
|
Read all block schemas
|
1455
1472
|
Raises:
|
@@ -1459,7 +1476,7 @@ class PrefectClient:
|
|
1459
1476
|
A BlockSchema.
|
1460
1477
|
"""
|
1461
1478
|
response = await self._client.post("/block_schemas/filter", json={})
|
1462
|
-
return pydantic.TypeAdapter(
|
1479
|
+
return pydantic.TypeAdapter(list[BlockSchema]).validate_python(response.json())
|
1463
1480
|
|
1464
1481
|
async def get_most_recent_block_schema_for_block_type(
|
1465
1482
|
self,
|
@@ -1495,7 +1512,7 @@ class PrefectClient:
|
|
1495
1512
|
self,
|
1496
1513
|
block_document_id: UUID,
|
1497
1514
|
include_secrets: bool = True,
|
1498
|
-
):
|
1515
|
+
) -> BlockDocument:
|
1499
1516
|
"""
|
1500
1517
|
Read the block document with the specified ID.
|
1501
1518
|
|
@@ -1573,7 +1590,7 @@ class PrefectClient:
|
|
1573
1590
|
offset: Optional[int] = None,
|
1574
1591
|
limit: Optional[int] = None,
|
1575
1592
|
include_secrets: bool = True,
|
1576
|
-
):
|
1593
|
+
) -> list[BlockDocument]:
|
1577
1594
|
"""
|
1578
1595
|
Read block documents
|
1579
1596
|
|
@@ -1600,7 +1617,7 @@ class PrefectClient:
|
|
1600
1617
|
include_secrets=include_secrets,
|
1601
1618
|
),
|
1602
1619
|
)
|
1603
|
-
return pydantic.TypeAdapter(
|
1620
|
+
return pydantic.TypeAdapter(list[BlockDocument]).validate_python(
|
1604
1621
|
response.json()
|
1605
1622
|
)
|
1606
1623
|
|
@@ -1610,7 +1627,7 @@ class PrefectClient:
|
|
1610
1627
|
offset: Optional[int] = None,
|
1611
1628
|
limit: Optional[int] = None,
|
1612
1629
|
include_secrets: bool = True,
|
1613
|
-
) ->
|
1630
|
+
) -> list[BlockDocument]:
|
1614
1631
|
"""Retrieve block documents by block type slug.
|
1615
1632
|
|
1616
1633
|
Args:
|
@@ -1631,7 +1648,7 @@ class PrefectClient:
|
|
1631
1648
|
),
|
1632
1649
|
)
|
1633
1650
|
|
1634
|
-
return pydantic.TypeAdapter(
|
1651
|
+
return pydantic.TypeAdapter(list[BlockDocument]).validate_python(
|
1635
1652
|
response.json()
|
1636
1653
|
)
|
1637
1654
|
|
@@ -1640,23 +1657,23 @@ class PrefectClient:
|
|
1640
1657
|
flow_id: UUID,
|
1641
1658
|
name: str,
|
1642
1659
|
version: Optional[str] = None,
|
1643
|
-
schedules: Optional[
|
1660
|
+
schedules: Optional[list[DeploymentScheduleCreate]] = None,
|
1644
1661
|
concurrency_limit: Optional[int] = None,
|
1645
1662
|
concurrency_options: Optional[ConcurrencyOptions] = None,
|
1646
|
-
parameters: Optional[
|
1663
|
+
parameters: Optional[dict[str, Any]] = None,
|
1647
1664
|
description: Optional[str] = None,
|
1648
1665
|
work_queue_name: Optional[str] = None,
|
1649
1666
|
work_pool_name: Optional[str] = None,
|
1650
|
-
tags: Optional[
|
1667
|
+
tags: Optional[list[str]] = None,
|
1651
1668
|
storage_document_id: Optional[UUID] = None,
|
1652
1669
|
path: Optional[str] = None,
|
1653
1670
|
entrypoint: Optional[str] = None,
|
1654
1671
|
infrastructure_document_id: Optional[UUID] = None,
|
1655
|
-
parameter_openapi_schema: Optional[
|
1672
|
+
parameter_openapi_schema: Optional[dict[str, Any]] = None,
|
1656
1673
|
paused: Optional[bool] = None,
|
1657
|
-
pull_steps: Optional[
|
1674
|
+
pull_steps: Optional[list[dict[str, Any]]] = None,
|
1658
1675
|
enforce_parameter_schema: Optional[bool] = None,
|
1659
|
-
job_variables: Optional[
|
1676
|
+
job_variables: Optional[dict[str, Any]] = None,
|
1660
1677
|
) -> UUID:
|
1661
1678
|
"""
|
1662
1679
|
Create a deployment.
|
@@ -1736,7 +1753,9 @@ class PrefectClient:
|
|
1736
1753
|
|
1737
1754
|
return UUID(deployment_id)
|
1738
1755
|
|
1739
|
-
async def set_deployment_paused_state(
|
1756
|
+
async def set_deployment_paused_state(
|
1757
|
+
self, deployment_id: UUID, paused: bool
|
1758
|
+
) -> None:
|
1740
1759
|
await self._client.patch(
|
1741
1760
|
f"/deployments/{deployment_id}", json={"paused": paused}
|
1742
1761
|
)
|
@@ -1745,7 +1764,7 @@ class PrefectClient:
|
|
1745
1764
|
self,
|
1746
1765
|
deployment_id: UUID,
|
1747
1766
|
deployment: DeploymentUpdate,
|
1748
|
-
):
|
1767
|
+
) -> None:
|
1749
1768
|
await self._client.patch(
|
1750
1769
|
f"/deployments/{deployment_id}",
|
1751
1770
|
json=deployment.model_dump(mode="json", exclude_unset=True),
|
@@ -1768,7 +1787,7 @@ class PrefectClient:
|
|
1768
1787
|
|
1769
1788
|
async def read_deployment(
|
1770
1789
|
self,
|
1771
|
-
deployment_id: UUID,
|
1790
|
+
deployment_id: Union[UUID, str],
|
1772
1791
|
) -> DeploymentResponse:
|
1773
1792
|
"""
|
1774
1793
|
Query the Prefect API for a deployment by id.
|
@@ -1861,7 +1880,7 @@ class PrefectClient:
|
|
1861
1880
|
limit: Optional[int] = None,
|
1862
1881
|
sort: Optional[DeploymentSort] = None,
|
1863
1882
|
offset: int = 0,
|
1864
|
-
) ->
|
1883
|
+
) -> list[DeploymentResponse]:
|
1865
1884
|
"""
|
1866
1885
|
Query the Prefect API for deployments. Only deployments matching all
|
1867
1886
|
the provided criteria will be returned.
|
@@ -1880,7 +1899,7 @@ class PrefectClient:
|
|
1880
1899
|
a list of Deployment model representations
|
1881
1900
|
of the deployments
|
1882
1901
|
"""
|
1883
|
-
body = {
|
1902
|
+
body: dict[str, Any] = {
|
1884
1903
|
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
|
1885
1904
|
"flow_runs": (
|
1886
1905
|
flow_run_filter.model_dump(mode="json", exclude_unset=True)
|
@@ -1905,14 +1924,14 @@ class PrefectClient:
|
|
1905
1924
|
}
|
1906
1925
|
|
1907
1926
|
response = await self._client.post("/deployments/filter", json=body)
|
1908
|
-
return pydantic.TypeAdapter(
|
1927
|
+
return pydantic.TypeAdapter(list[DeploymentResponse]).validate_python(
|
1909
1928
|
response.json()
|
1910
1929
|
)
|
1911
1930
|
|
1912
1931
|
async def delete_deployment(
|
1913
1932
|
self,
|
1914
1933
|
deployment_id: UUID,
|
1915
|
-
):
|
1934
|
+
) -> None:
|
1916
1935
|
"""
|
1917
1936
|
Delete deployment by id.
|
1918
1937
|
|
@@ -1933,8 +1952,8 @@ class PrefectClient:
|
|
1933
1952
|
async def create_deployment_schedules(
|
1934
1953
|
self,
|
1935
1954
|
deployment_id: UUID,
|
1936
|
-
schedules:
|
1937
|
-
) ->
|
1955
|
+
schedules: list[tuple[SCHEDULE_TYPES, bool]],
|
1956
|
+
) -> list[DeploymentSchedule]:
|
1938
1957
|
"""
|
1939
1958
|
Create deployment schedules.
|
1940
1959
|
|
@@ -1961,14 +1980,14 @@ class PrefectClient:
|
|
1961
1980
|
response = await self._client.post(
|
1962
1981
|
f"/deployments/{deployment_id}/schedules", json=json
|
1963
1982
|
)
|
1964
|
-
return pydantic.TypeAdapter(
|
1983
|
+
return pydantic.TypeAdapter(list[DeploymentSchedule]).validate_python(
|
1965
1984
|
response.json()
|
1966
1985
|
)
|
1967
1986
|
|
1968
1987
|
async def read_deployment_schedules(
|
1969
1988
|
self,
|
1970
1989
|
deployment_id: UUID,
|
1971
|
-
) ->
|
1990
|
+
) -> list[DeploymentSchedule]:
|
1972
1991
|
"""
|
1973
1992
|
Query the Prefect API for a deployment's schedules.
|
1974
1993
|
|
@@ -1985,7 +2004,7 @@ class PrefectClient:
|
|
1985
2004
|
raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
|
1986
2005
|
else:
|
1987
2006
|
raise
|
1988
|
-
return pydantic.TypeAdapter(
|
2007
|
+
return pydantic.TypeAdapter(list[DeploymentSchedule]).validate_python(
|
1989
2008
|
response.json()
|
1990
2009
|
)
|
1991
2010
|
|
@@ -1995,7 +2014,7 @@ class PrefectClient:
|
|
1995
2014
|
schedule_id: UUID,
|
1996
2015
|
active: Optional[bool] = None,
|
1997
2016
|
schedule: Optional[SCHEDULE_TYPES] = None,
|
1998
|
-
):
|
2017
|
+
) -> None:
|
1999
2018
|
"""
|
2000
2019
|
Update a deployment schedule by ID.
|
2001
2020
|
|
@@ -2005,7 +2024,7 @@ class PrefectClient:
|
|
2005
2024
|
active: whether or not the schedule should be active
|
2006
2025
|
schedule: the cron, rrule, or interval schedule this deployment schedule should use
|
2007
2026
|
"""
|
2008
|
-
kwargs = {}
|
2027
|
+
kwargs: dict[str, Any] = {}
|
2009
2028
|
if active is not None:
|
2010
2029
|
kwargs["active"] = active
|
2011
2030
|
if schedule is not None:
|
@@ -2069,8 +2088,8 @@ class PrefectClient:
|
|
2069
2088
|
return FlowRun.model_validate(response.json())
|
2070
2089
|
|
2071
2090
|
async def resume_flow_run(
|
2072
|
-
self, flow_run_id: UUID, run_input: Optional[
|
2073
|
-
) -> OrchestrationResult:
|
2091
|
+
self, flow_run_id: UUID, run_input: Optional[dict[str, Any]] = None
|
2092
|
+
) -> OrchestrationResult[Any]:
|
2074
2093
|
"""
|
2075
2094
|
Resumes a paused flow run.
|
2076
2095
|
|
@@ -2088,21 +2107,24 @@ class PrefectClient:
|
|
2088
2107
|
except httpx.HTTPStatusError:
|
2089
2108
|
raise
|
2090
2109
|
|
2091
|
-
|
2110
|
+
result: OrchestrationResult[Any] = OrchestrationResult.model_validate(
|
2111
|
+
response.json()
|
2112
|
+
)
|
2113
|
+
return result
|
2092
2114
|
|
2093
2115
|
async def read_flow_runs(
|
2094
2116
|
self,
|
2095
2117
|
*,
|
2096
|
-
flow_filter: FlowFilter = None,
|
2097
|
-
flow_run_filter: FlowRunFilter = None,
|
2098
|
-
task_run_filter: TaskRunFilter = None,
|
2099
|
-
deployment_filter: DeploymentFilter = None,
|
2100
|
-
work_pool_filter: WorkPoolFilter = None,
|
2101
|
-
work_queue_filter: WorkQueueFilter = None,
|
2102
|
-
sort: FlowRunSort = None,
|
2118
|
+
flow_filter: Optional[FlowFilter] = None,
|
2119
|
+
flow_run_filter: Optional[FlowRunFilter] = None,
|
2120
|
+
task_run_filter: Optional[TaskRunFilter] = None,
|
2121
|
+
deployment_filter: Optional[DeploymentFilter] = None,
|
2122
|
+
work_pool_filter: Optional[WorkPoolFilter] = None,
|
2123
|
+
work_queue_filter: Optional[WorkQueueFilter] = None,
|
2124
|
+
sort: Optional[FlowRunSort] = None,
|
2103
2125
|
limit: Optional[int] = None,
|
2104
2126
|
offset: int = 0,
|
2105
|
-
) ->
|
2127
|
+
) -> list[FlowRun]:
|
2106
2128
|
"""
|
2107
2129
|
Query the Prefect API for flow runs. Only flow runs matching all criteria will
|
2108
2130
|
be returned.
|
@@ -2122,7 +2144,7 @@ class PrefectClient:
|
|
2122
2144
|
a list of Flow Run model representations
|
2123
2145
|
of the flow runs
|
2124
2146
|
"""
|
2125
|
-
body = {
|
2147
|
+
body: dict[str, Any] = {
|
2126
2148
|
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
|
2127
2149
|
"flow_runs": (
|
2128
2150
|
flow_run_filter.model_dump(mode="json", exclude_unset=True)
|
@@ -2147,14 +2169,14 @@ class PrefectClient:
|
|
2147
2169
|
}
|
2148
2170
|
|
2149
2171
|
response = await self._client.post("/flow_runs/filter", json=body)
|
2150
|
-
return pydantic.TypeAdapter(
|
2172
|
+
return pydantic.TypeAdapter(list[FlowRun]).validate_python(response.json())
|
2151
2173
|
|
2152
2174
|
async def set_flow_run_state(
|
2153
2175
|
self,
|
2154
|
-
flow_run_id: UUID,
|
2155
|
-
state: "prefect.states.State",
|
2176
|
+
flow_run_id: Union[UUID, str],
|
2177
|
+
state: "prefect.states.State[T]",
|
2156
2178
|
force: bool = False,
|
2157
|
-
) -> OrchestrationResult:
|
2179
|
+
) -> OrchestrationResult[T]:
|
2158
2180
|
"""
|
2159
2181
|
Set the state of a flow run.
|
2160
2182
|
|
@@ -2187,11 +2209,14 @@ class PrefectClient:
|
|
2187
2209
|
else:
|
2188
2210
|
raise
|
2189
2211
|
|
2190
|
-
|
2212
|
+
result: OrchestrationResult[T] = OrchestrationResult.model_validate(
|
2213
|
+
response.json()
|
2214
|
+
)
|
2215
|
+
return result
|
2191
2216
|
|
2192
2217
|
async def read_flow_run_states(
|
2193
2218
|
self, flow_run_id: UUID
|
2194
|
-
) ->
|
2219
|
+
) -> list[prefect.states.State]:
|
2195
2220
|
"""
|
2196
2221
|
Query for the states of a flow run
|
2197
2222
|
|
@@ -2205,11 +2230,18 @@ class PrefectClient:
|
|
2205
2230
|
response = await self._client.get(
|
2206
2231
|
"/flow_run_states/", params=dict(flow_run_id=str(flow_run_id))
|
2207
2232
|
)
|
2208
|
-
return pydantic.TypeAdapter(
|
2233
|
+
return pydantic.TypeAdapter(list[prefect.states.State]).validate_python(
|
2209
2234
|
response.json()
|
2210
2235
|
)
|
2211
2236
|
|
2212
|
-
async def
|
2237
|
+
async def set_flow_run_name(self, flow_run_id: UUID, name: str) -> httpx.Response:
|
2238
|
+
flow_run_data = FlowRunUpdate(name=name)
|
2239
|
+
return await self._client.patch(
|
2240
|
+
f"/flow_runs/{flow_run_id}",
|
2241
|
+
json=flow_run_data.model_dump(mode="json", exclude_unset=True),
|
2242
|
+
)
|
2243
|
+
|
2244
|
+
async def set_task_run_name(self, task_run_id: UUID, name: str) -> httpx.Response:
|
2213
2245
|
task_run_data = TaskRunUpdate(name=name)
|
2214
2246
|
return await self._client.patch(
|
2215
2247
|
f"/task_runs/{task_run_id}",
|
@@ -2226,9 +2258,9 @@ class PrefectClient:
|
|
2226
2258
|
extra_tags: Optional[Iterable[str]] = None,
|
2227
2259
|
state: Optional[prefect.states.State[R]] = None,
|
2228
2260
|
task_inputs: Optional[
|
2229
|
-
|
2261
|
+
dict[
|
2230
2262
|
str,
|
2231
|
-
|
2263
|
+
list[
|
2232
2264
|
Union[
|
2233
2265
|
TaskRunResult,
|
2234
2266
|
Parameter,
|
@@ -2262,6 +2294,12 @@ class PrefectClient:
|
|
2262
2294
|
if state is None:
|
2263
2295
|
state = prefect.states.Pending()
|
2264
2296
|
|
2297
|
+
retry_delay = task.retry_delay_seconds
|
2298
|
+
if isinstance(retry_delay, list):
|
2299
|
+
retry_delay = [int(rd) for rd in retry_delay]
|
2300
|
+
elif isinstance(retry_delay, float):
|
2301
|
+
retry_delay = int(retry_delay)
|
2302
|
+
|
2265
2303
|
task_run_data = TaskRunCreate(
|
2266
2304
|
id=id,
|
2267
2305
|
name=name,
|
@@ -2272,7 +2310,7 @@ class PrefectClient:
|
|
2272
2310
|
task_version=task.version,
|
2273
2311
|
empirical_policy=TaskRunPolicy(
|
2274
2312
|
retries=task.retries,
|
2275
|
-
retry_delay=
|
2313
|
+
retry_delay=retry_delay,
|
2276
2314
|
retry_jitter_factor=task.retry_jitter_factor,
|
2277
2315
|
),
|
2278
2316
|
state=state.to_state_create(),
|
@@ -2305,14 +2343,14 @@ class PrefectClient:
|
|
2305
2343
|
async def read_task_runs(
|
2306
2344
|
self,
|
2307
2345
|
*,
|
2308
|
-
flow_filter: FlowFilter = None,
|
2309
|
-
flow_run_filter: FlowRunFilter = None,
|
2310
|
-
task_run_filter: TaskRunFilter = None,
|
2311
|
-
deployment_filter: DeploymentFilter = None,
|
2312
|
-
sort: TaskRunSort = None,
|
2346
|
+
flow_filter: Optional[FlowFilter] = None,
|
2347
|
+
flow_run_filter: Optional[FlowRunFilter] = None,
|
2348
|
+
task_run_filter: Optional[TaskRunFilter] = None,
|
2349
|
+
deployment_filter: Optional[DeploymentFilter] = None,
|
2350
|
+
sort: Optional[TaskRunSort] = None,
|
2313
2351
|
limit: Optional[int] = None,
|
2314
2352
|
offset: int = 0,
|
2315
|
-
) ->
|
2353
|
+
) -> list[TaskRun]:
|
2316
2354
|
"""
|
2317
2355
|
Query the Prefect API for task runs. Only task runs matching all criteria will
|
2318
2356
|
be returned.
|
@@ -2330,7 +2368,7 @@ class PrefectClient:
|
|
2330
2368
|
a list of Task Run model representations
|
2331
2369
|
of the task runs
|
2332
2370
|
"""
|
2333
|
-
body = {
|
2371
|
+
body: dict[str, Any] = {
|
2334
2372
|
"flows": flow_filter.model_dump(mode="json") if flow_filter else None,
|
2335
2373
|
"flow_runs": (
|
2336
2374
|
flow_run_filter.model_dump(mode="json", exclude_unset=True)
|
@@ -2348,7 +2386,7 @@ class PrefectClient:
|
|
2348
2386
|
"offset": offset,
|
2349
2387
|
}
|
2350
2388
|
response = await self._client.post("/task_runs/filter", json=body)
|
2351
|
-
return pydantic.TypeAdapter(
|
2389
|
+
return pydantic.TypeAdapter(list[TaskRun]).validate_python(response.json())
|
2352
2390
|
|
2353
2391
|
async def delete_task_run(self, task_run_id: UUID) -> None:
|
2354
2392
|
"""
|
@@ -2371,9 +2409,9 @@ class PrefectClient:
|
|
2371
2409
|
async def set_task_run_state(
|
2372
2410
|
self,
|
2373
2411
|
task_run_id: UUID,
|
2374
|
-
state: prefect.states.State,
|
2412
|
+
state: prefect.states.State[T],
|
2375
2413
|
force: bool = False,
|
2376
|
-
) -> OrchestrationResult:
|
2414
|
+
) -> OrchestrationResult[T]:
|
2377
2415
|
"""
|
2378
2416
|
Set the state of a task run.
|
2379
2417
|
|
@@ -2392,11 +2430,14 @@ class PrefectClient:
|
|
2392
2430
|
f"/task_runs/{task_run_id}/set_state",
|
2393
2431
|
json=dict(state=state_create.model_dump(mode="json"), force=force),
|
2394
2432
|
)
|
2395
|
-
|
2433
|
+
result: OrchestrationResult[T] = OrchestrationResult.model_validate(
|
2434
|
+
response.json()
|
2435
|
+
)
|
2436
|
+
return result
|
2396
2437
|
|
2397
2438
|
async def read_task_run_states(
|
2398
2439
|
self, task_run_id: UUID
|
2399
|
-
) ->
|
2440
|
+
) -> list[prefect.states.State]:
|
2400
2441
|
"""
|
2401
2442
|
Query for the states of a task run
|
2402
2443
|
|
@@ -2409,11 +2450,13 @@ class PrefectClient:
|
|
2409
2450
|
response = await self._client.get(
|
2410
2451
|
"/task_run_states/", params=dict(task_run_id=str(task_run_id))
|
2411
2452
|
)
|
2412
|
-
return pydantic.TypeAdapter(
|
2453
|
+
return pydantic.TypeAdapter(list[prefect.states.State]).validate_python(
|
2413
2454
|
response.json()
|
2414
2455
|
)
|
2415
2456
|
|
2416
|
-
async def create_logs(
|
2457
|
+
async def create_logs(
|
2458
|
+
self, logs: Iterable[Union[LogCreate, dict[str, Any]]]
|
2459
|
+
) -> None:
|
2417
2460
|
"""
|
2418
2461
|
Create logs for a flow or task run
|
2419
2462
|
|
@@ -2430,8 +2473,8 @@ class PrefectClient:
|
|
2430
2473
|
self,
|
2431
2474
|
block_document_id: UUID,
|
2432
2475
|
is_active: bool = True,
|
2433
|
-
tags:
|
2434
|
-
state_names:
|
2476
|
+
tags: Optional[list[str]] = None,
|
2477
|
+
state_names: Optional[list[str]] = None,
|
2435
2478
|
message_template: Optional[str] = None,
|
2436
2479
|
) -> UUID:
|
2437
2480
|
"""
|
@@ -2493,8 +2536,8 @@ class PrefectClient:
|
|
2493
2536
|
id: UUID,
|
2494
2537
|
block_document_id: Optional[UUID] = None,
|
2495
2538
|
is_active: Optional[bool] = None,
|
2496
|
-
tags: Optional[
|
2497
|
-
state_names: Optional[
|
2539
|
+
tags: Optional[list[str]] = None,
|
2540
|
+
state_names: Optional[list[str]] = None,
|
2498
2541
|
message_template: Optional[str] = None,
|
2499
2542
|
) -> None:
|
2500
2543
|
"""
|
@@ -2511,7 +2554,7 @@ class PrefectClient:
|
|
2511
2554
|
prefect.exceptions.ObjectNotFound: If request returns 404
|
2512
2555
|
httpx.RequestError: If requests fails
|
2513
2556
|
"""
|
2514
|
-
params = {}
|
2557
|
+
params: dict[str, Any] = {}
|
2515
2558
|
if block_document_id is not None:
|
2516
2559
|
params["block_document_id"] = block_document_id
|
2517
2560
|
if is_active is not None:
|
@@ -2541,7 +2584,7 @@ class PrefectClient:
|
|
2541
2584
|
flow_run_notification_policy_filter: FlowRunNotificationPolicyFilter,
|
2542
2585
|
limit: Optional[int] = None,
|
2543
2586
|
offset: int = 0,
|
2544
|
-
) ->
|
2587
|
+
) -> list[FlowRunNotificationPolicy]:
|
2545
2588
|
"""
|
2546
2589
|
Query the Prefect API for flow run notification policies. Only policies matching all criteria will
|
2547
2590
|
be returned.
|
@@ -2555,7 +2598,7 @@ class PrefectClient:
|
|
2555
2598
|
a list of FlowRunNotificationPolicy model representations
|
2556
2599
|
of the notification policies
|
2557
2600
|
"""
|
2558
|
-
body = {
|
2601
|
+
body: dict[str, Any] = {
|
2559
2602
|
"flow_run_notification_policy_filter": (
|
2560
2603
|
flow_run_notification_policy_filter.model_dump(mode="json")
|
2561
2604
|
if flow_run_notification_policy_filter
|
@@ -2567,7 +2610,7 @@ class PrefectClient:
|
|
2567
2610
|
response = await self._client.post(
|
2568
2611
|
"/flow_run_notification_policies/filter", json=body
|
2569
2612
|
)
|
2570
|
-
return pydantic.TypeAdapter(
|
2613
|
+
return pydantic.TypeAdapter(list[FlowRunNotificationPolicy]).validate_python(
|
2571
2614
|
response.json()
|
2572
2615
|
)
|
2573
2616
|
|
@@ -2577,11 +2620,11 @@ class PrefectClient:
|
|
2577
2620
|
limit: Optional[int] = None,
|
2578
2621
|
offset: Optional[int] = None,
|
2579
2622
|
sort: LogSort = LogSort.TIMESTAMP_ASC,
|
2580
|
-
) ->
|
2623
|
+
) -> list[Log]:
|
2581
2624
|
"""
|
2582
2625
|
Read flow and task run logs.
|
2583
2626
|
"""
|
2584
|
-
body = {
|
2627
|
+
body: dict[str, Any] = {
|
2585
2628
|
"logs": log_filter.model_dump(mode="json") if log_filter else None,
|
2586
2629
|
"limit": limit,
|
2587
2630
|
"offset": offset,
|
@@ -2589,7 +2632,7 @@ class PrefectClient:
|
|
2589
2632
|
}
|
2590
2633
|
|
2591
2634
|
response = await self._client.post("/logs/filter", json=body)
|
2592
|
-
return pydantic.TypeAdapter(
|
2635
|
+
return pydantic.TypeAdapter(list[Log]).validate_python(response.json())
|
2593
2636
|
|
2594
2637
|
async def send_worker_heartbeat(
|
2595
2638
|
self,
|
@@ -2608,7 +2651,7 @@ class PrefectClient:
|
|
2608
2651
|
return_id: Whether to return the worker ID. Note: will return `None` if the connected server does not support returning worker IDs, even if `return_id` is `True`.
|
2609
2652
|
worker_metadata: Metadata about the worker to send to the server.
|
2610
2653
|
"""
|
2611
|
-
params = {
|
2654
|
+
params: dict[str, Any] = {
|
2612
2655
|
"name": worker_name,
|
2613
2656
|
"heartbeat_interval_seconds": heartbeat_interval_seconds,
|
2614
2657
|
}
|
@@ -2640,7 +2683,7 @@ class PrefectClient:
|
|
2640
2683
|
worker_filter: Optional[WorkerFilter] = None,
|
2641
2684
|
offset: Optional[int] = None,
|
2642
2685
|
limit: Optional[int] = None,
|
2643
|
-
) ->
|
2686
|
+
) -> list[Worker]:
|
2644
2687
|
"""
|
2645
2688
|
Reads workers for a given work pool.
|
2646
2689
|
|
@@ -2664,7 +2707,7 @@ class PrefectClient:
|
|
2664
2707
|
},
|
2665
2708
|
)
|
2666
2709
|
|
2667
|
-
return pydantic.TypeAdapter(
|
2710
|
+
return pydantic.TypeAdapter(list[Worker]).validate_python(response.json())
|
2668
2711
|
|
2669
2712
|
async def read_work_pool(self, work_pool_name: str) -> WorkPool:
|
2670
2713
|
"""
|
@@ -2691,7 +2734,7 @@ class PrefectClient:
|
|
2691
2734
|
limit: Optional[int] = None,
|
2692
2735
|
offset: int = 0,
|
2693
2736
|
work_pool_filter: Optional[WorkPoolFilter] = None,
|
2694
|
-
) ->
|
2737
|
+
) -> list[WorkPool]:
|
2695
2738
|
"""
|
2696
2739
|
Reads work pools.
|
2697
2740
|
|
@@ -2704,7 +2747,7 @@ class PrefectClient:
|
|
2704
2747
|
A list of work pools.
|
2705
2748
|
"""
|
2706
2749
|
|
2707
|
-
body = {
|
2750
|
+
body: dict[str, Any] = {
|
2708
2751
|
"limit": limit,
|
2709
2752
|
"offset": offset,
|
2710
2753
|
"work_pools": (
|
@@ -2712,7 +2755,7 @@ class PrefectClient:
|
|
2712
2755
|
),
|
2713
2756
|
}
|
2714
2757
|
response = await self._client.post("/work_pools/filter", json=body)
|
2715
|
-
return pydantic.TypeAdapter(
|
2758
|
+
return pydantic.TypeAdapter(list[WorkPool]).validate_python(response.json())
|
2716
2759
|
|
2717
2760
|
async def create_work_pool(
|
2718
2761
|
self,
|
@@ -2762,7 +2805,7 @@ class PrefectClient:
|
|
2762
2805
|
self,
|
2763
2806
|
work_pool_name: str,
|
2764
2807
|
work_pool: WorkPoolUpdate,
|
2765
|
-
):
|
2808
|
+
) -> None:
|
2766
2809
|
"""
|
2767
2810
|
Updates a work pool.
|
2768
2811
|
|
@@ -2784,7 +2827,7 @@ class PrefectClient:
|
|
2784
2827
|
async def delete_work_pool(
|
2785
2828
|
self,
|
2786
2829
|
work_pool_name: str,
|
2787
|
-
):
|
2830
|
+
) -> None:
|
2788
2831
|
"""
|
2789
2832
|
Deletes a work pool.
|
2790
2833
|
|
@@ -2805,7 +2848,7 @@ class PrefectClient:
|
|
2805
2848
|
work_queue_filter: Optional[WorkQueueFilter] = None,
|
2806
2849
|
limit: Optional[int] = None,
|
2807
2850
|
offset: Optional[int] = None,
|
2808
|
-
) ->
|
2851
|
+
) -> list[WorkQueue]:
|
2809
2852
|
"""
|
2810
2853
|
Retrieves queues for a work pool.
|
2811
2854
|
|
@@ -2818,7 +2861,7 @@ class PrefectClient:
|
|
2818
2861
|
Returns:
|
2819
2862
|
List of queues for the specified work pool.
|
2820
2863
|
"""
|
2821
|
-
json = {
|
2864
|
+
json: dict[str, Any] = {
|
2822
2865
|
"work_queues": (
|
2823
2866
|
work_queue_filter.model_dump(mode="json", exclude_unset=True)
|
2824
2867
|
if work_queue_filter
|
@@ -2842,15 +2885,15 @@ class PrefectClient:
|
|
2842
2885
|
else:
|
2843
2886
|
response = await self._client.post("/work_queues/filter", json=json)
|
2844
2887
|
|
2845
|
-
return pydantic.TypeAdapter(
|
2888
|
+
return pydantic.TypeAdapter(list[WorkQueue]).validate_python(response.json())
|
2846
2889
|
|
2847
2890
|
async def get_scheduled_flow_runs_for_deployments(
|
2848
2891
|
self,
|
2849
|
-
deployment_ids:
|
2892
|
+
deployment_ids: list[UUID],
|
2850
2893
|
scheduled_before: Optional[datetime.datetime] = None,
|
2851
2894
|
limit: Optional[int] = None,
|
2852
|
-
) ->
|
2853
|
-
body:
|
2895
|
+
) -> list[FlowRunResponse]:
|
2896
|
+
body: dict[str, Any] = dict(deployment_ids=[str(id) for id in deployment_ids])
|
2854
2897
|
if scheduled_before:
|
2855
2898
|
body["scheduled_before"] = str(scheduled_before)
|
2856
2899
|
if limit:
|
@@ -2861,16 +2904,16 @@ class PrefectClient:
|
|
2861
2904
|
json=body,
|
2862
2905
|
)
|
2863
2906
|
|
2864
|
-
return pydantic.TypeAdapter(
|
2907
|
+
return pydantic.TypeAdapter(list[FlowRunResponse]).validate_python(
|
2865
2908
|
response.json()
|
2866
2909
|
)
|
2867
2910
|
|
2868
2911
|
async def get_scheduled_flow_runs_for_work_pool(
|
2869
2912
|
self,
|
2870
2913
|
work_pool_name: str,
|
2871
|
-
work_queue_names: Optional[
|
2914
|
+
work_queue_names: Optional[list[str]] = None,
|
2872
2915
|
scheduled_before: Optional[datetime.datetime] = None,
|
2873
|
-
) ->
|
2916
|
+
) -> list[WorkerFlowRunResponse]:
|
2874
2917
|
"""
|
2875
2918
|
Retrieves scheduled flow runs for the provided set of work pool queues.
|
2876
2919
|
|
@@ -2886,7 +2929,7 @@ class PrefectClient:
|
|
2886
2929
|
A list of worker flow run responses containing information about the
|
2887
2930
|
retrieved flow runs.
|
2888
2931
|
"""
|
2889
|
-
body:
|
2932
|
+
body: dict[str, Any] = {}
|
2890
2933
|
if work_queue_names is not None:
|
2891
2934
|
body["work_queue_names"] = list(work_queue_names)
|
2892
2935
|
if scheduled_before:
|
@@ -2896,7 +2939,7 @@ class PrefectClient:
|
|
2896
2939
|
f"/work_pools/{work_pool_name}/get_scheduled_flow_runs",
|
2897
2940
|
json=body,
|
2898
2941
|
)
|
2899
|
-
return pydantic.TypeAdapter(
|
2942
|
+
return pydantic.TypeAdapter(list[WorkerFlowRunResponse]).validate_python(
|
2900
2943
|
response.json()
|
2901
2944
|
)
|
2902
2945
|
|
@@ -2942,13 +2985,13 @@ class PrefectClient:
|
|
2942
2985
|
async def read_artifacts(
|
2943
2986
|
self,
|
2944
2987
|
*,
|
2945
|
-
artifact_filter: ArtifactFilter = None,
|
2946
|
-
flow_run_filter: FlowRunFilter = None,
|
2947
|
-
task_run_filter: TaskRunFilter = None,
|
2948
|
-
sort: ArtifactSort = None,
|
2988
|
+
artifact_filter: Optional[ArtifactFilter] = None,
|
2989
|
+
flow_run_filter: Optional[FlowRunFilter] = None,
|
2990
|
+
task_run_filter: Optional[TaskRunFilter] = None,
|
2991
|
+
sort: Optional[ArtifactSort] = None,
|
2949
2992
|
limit: Optional[int] = None,
|
2950
2993
|
offset: int = 0,
|
2951
|
-
) ->
|
2994
|
+
) -> list[Artifact]:
|
2952
2995
|
"""
|
2953
2996
|
Query the Prefect API for artifacts. Only artifacts matching all criteria will
|
2954
2997
|
be returned.
|
@@ -2962,7 +3005,7 @@ class PrefectClient:
|
|
2962
3005
|
Returns:
|
2963
3006
|
a list of Artifact model representations of the artifacts
|
2964
3007
|
"""
|
2965
|
-
body = {
|
3008
|
+
body: dict[str, Any] = {
|
2966
3009
|
"artifacts": (
|
2967
3010
|
artifact_filter.model_dump(mode="json") if artifact_filter else None
|
2968
3011
|
),
|
@@ -2977,18 +3020,18 @@ class PrefectClient:
|
|
2977
3020
|
"offset": offset,
|
2978
3021
|
}
|
2979
3022
|
response = await self._client.post("/artifacts/filter", json=body)
|
2980
|
-
return pydantic.TypeAdapter(
|
3023
|
+
return pydantic.TypeAdapter(list[Artifact]).validate_python(response.json())
|
2981
3024
|
|
2982
3025
|
async def read_latest_artifacts(
|
2983
3026
|
self,
|
2984
3027
|
*,
|
2985
|
-
artifact_filter: ArtifactCollectionFilter = None,
|
2986
|
-
flow_run_filter: FlowRunFilter = None,
|
2987
|
-
task_run_filter: TaskRunFilter = None,
|
2988
|
-
sort: ArtifactCollectionSort = None,
|
3028
|
+
artifact_filter: Optional[ArtifactCollectionFilter] = None,
|
3029
|
+
flow_run_filter: Optional[FlowRunFilter] = None,
|
3030
|
+
task_run_filter: Optional[TaskRunFilter] = None,
|
3031
|
+
sort: Optional[ArtifactCollectionSort] = None,
|
2989
3032
|
limit: Optional[int] = None,
|
2990
3033
|
offset: int = 0,
|
2991
|
-
) ->
|
3034
|
+
) -> list[ArtifactCollection]:
|
2992
3035
|
"""
|
2993
3036
|
Query the Prefect API for artifacts. Only artifacts matching all criteria will
|
2994
3037
|
be returned.
|
@@ -3002,7 +3045,7 @@ class PrefectClient:
|
|
3002
3045
|
Returns:
|
3003
3046
|
a list of Artifact model representations of the artifacts
|
3004
3047
|
"""
|
3005
|
-
body = {
|
3048
|
+
body: dict[str, Any] = {
|
3006
3049
|
"artifacts": (
|
3007
3050
|
artifact_filter.model_dump(mode="json") if artifact_filter else None
|
3008
3051
|
),
|
@@ -3017,7 +3060,7 @@ class PrefectClient:
|
|
3017
3060
|
"offset": offset,
|
3018
3061
|
}
|
3019
3062
|
response = await self._client.post("/artifacts/latest/filter", json=body)
|
3020
|
-
return pydantic.TypeAdapter(
|
3063
|
+
return pydantic.TypeAdapter(list[ArtifactCollection]).validate_python(
|
3021
3064
|
response.json()
|
3022
3065
|
)
|
3023
3066
|
|
@@ -3076,7 +3119,7 @@ class PrefectClient:
|
|
3076
3119
|
else:
|
3077
3120
|
raise
|
3078
3121
|
|
3079
|
-
async def delete_variable_by_name(self, name: str):
|
3122
|
+
async def delete_variable_by_name(self, name: str) -> None:
|
3080
3123
|
"""Deletes a variable by name."""
|
3081
3124
|
try:
|
3082
3125
|
await self._client.delete(f"/variables/name/{name}")
|
@@ -3086,12 +3129,12 @@ class PrefectClient:
|
|
3086
3129
|
else:
|
3087
3130
|
raise
|
3088
3131
|
|
3089
|
-
async def read_variables(self, limit: Optional[int] = None) ->
|
3132
|
+
async def read_variables(self, limit: Optional[int] = None) -> list[Variable]:
|
3090
3133
|
"""Reads all variables."""
|
3091
3134
|
response = await self._client.post("/variables/filter", json={"limit": limit})
|
3092
|
-
return pydantic.TypeAdapter(
|
3135
|
+
return pydantic.TypeAdapter(list[Variable]).validate_python(response.json())
|
3093
3136
|
|
3094
|
-
async def read_worker_metadata(self) ->
|
3137
|
+
async def read_worker_metadata(self) -> dict[str, Any]:
|
3095
3138
|
"""Reads worker metadata stored in Prefect collection registry."""
|
3096
3139
|
response = await self._client.get("collections/views/aggregate-worker-metadata")
|
3097
3140
|
response.raise_for_status()
|
@@ -3099,7 +3142,7 @@ class PrefectClient:
|
|
3099
3142
|
|
3100
3143
|
async def increment_concurrency_slots(
|
3101
3144
|
self,
|
3102
|
-
names:
|
3145
|
+
names: list[str],
|
3103
3146
|
slots: int,
|
3104
3147
|
mode: str,
|
3105
3148
|
create_if_missing: Optional[bool] = None,
|
@@ -3115,7 +3158,7 @@ class PrefectClient:
|
|
3115
3158
|
)
|
3116
3159
|
|
3117
3160
|
async def release_concurrency_slots(
|
3118
|
-
self, names:
|
3161
|
+
self, names: list[str], slots: int, occupancy_seconds: float
|
3119
3162
|
) -> httpx.Response:
|
3120
3163
|
"""
|
3121
3164
|
Release concurrency slots for the specified limits.
|
@@ -3187,7 +3230,9 @@ class PrefectClient:
|
|
3187
3230
|
else:
|
3188
3231
|
raise
|
3189
3232
|
|
3190
|
-
async def upsert_global_concurrency_limit_by_name(
|
3233
|
+
async def upsert_global_concurrency_limit_by_name(
|
3234
|
+
self, name: str, limit: int
|
3235
|
+
) -> None:
|
3191
3236
|
"""Creates a global concurrency limit with the given name and limit if one does not already exist.
|
3192
3237
|
|
3193
3238
|
If one does already exist matching the name then update it's limit if it is different.
|
@@ -3213,7 +3258,7 @@ class PrefectClient:
|
|
3213
3258
|
|
3214
3259
|
async def read_global_concurrency_limits(
|
3215
3260
|
self, limit: int = 10, offset: int = 0
|
3216
|
-
) ->
|
3261
|
+
) -> list[GlobalConcurrencyLimitResponse]:
|
3217
3262
|
response = await self._client.post(
|
3218
3263
|
"/v2/concurrency_limits/filter",
|
3219
3264
|
json={
|
@@ -3222,12 +3267,12 @@ class PrefectClient:
|
|
3222
3267
|
},
|
3223
3268
|
)
|
3224
3269
|
return pydantic.TypeAdapter(
|
3225
|
-
|
3270
|
+
list[GlobalConcurrencyLimitResponse]
|
3226
3271
|
).validate_python(response.json())
|
3227
3272
|
|
3228
3273
|
async def create_flow_run_input(
|
3229
3274
|
self, flow_run_id: UUID, key: str, value: str, sender: Optional[str] = None
|
3230
|
-
):
|
3275
|
+
) -> None:
|
3231
3276
|
"""
|
3232
3277
|
Creates a flow run input.
|
3233
3278
|
|
```diff
@@ -3248,8 +3293,8 @@ class PrefectClient:
         response.raise_for_status()
 
     async def filter_flow_run_input(
-        self, flow_run_id: UUID, key_prefix: str, limit: int, exclude_keys:
-    ) ->
+        self, flow_run_id: UUID, key_prefix: str, limit: int, exclude_keys: set[str]
+    ) -> list[FlowRunInput]:
         response = await self._client.post(
             f"/flow_runs/{flow_run_id}/input/filter",
             json={
@@ -3259,7 +3304,7 @@ class PrefectClient:
             },
         )
         response.raise_for_status()
-        return pydantic.TypeAdapter(
+        return pydantic.TypeAdapter(list[FlowRunInput]).validate_python(response.json())
 
     async def read_flow_run_input(self, flow_run_id: UUID, key: str) -> str:
         """
@@ -3273,7 +3318,7 @@ class PrefectClient:
         response.raise_for_status()
         return response.content.decode()
 
-    async def delete_flow_run_input(self, flow_run_id: UUID, key: str):
+    async def delete_flow_run_input(self, flow_run_id: UUID, key: str) -> None:
         """
         Deletes a flow run input.
 
@@ -3293,7 +3338,9 @@ class PrefectClient:
 
         return UUID(response.json()["id"])
 
-    async def update_automation(
+    async def update_automation(
+        self, automation_id: UUID, automation: AutomationCore
+    ) -> None:
         """Updates an automation in Prefect Cloud."""
         response = await self._client.put(
             f"/automations/{automation_id}",
@@ -3301,21 +3348,23 @@ class PrefectClient:
         )
         response.raise_for_status
 
-    async def read_automations(self) ->
+    async def read_automations(self) -> list[Automation]:
         response = await self._client.post("/automations/filter")
         response.raise_for_status()
-        return pydantic.TypeAdapter(
+        return pydantic.TypeAdapter(list[Automation]).validate_python(response.json())
 
     async def find_automation(
         self, id_or_name: Union[str, UUID]
     ) -> Optional[Automation]:
         if isinstance(id_or_name, str):
+            name = id_or_name
             try:
                 id = UUID(id_or_name)
             except ValueError:
                 id = None
-
+        else:
             id = id_or_name
+            name = str(id)
 
         if id:
             try:
@@ -3329,24 +3378,26 @@ class PrefectClient:
 
         # Look for it by an exact name
         for automation in automations:
-            if automation.name ==
+            if automation.name == name:
                 return automation
 
         # Look for it by a case-insensitive name
         for automation in automations:
-            if automation.name.lower() ==
+            if automation.name.lower() == name.lower():
                 return automation
 
         return None
 
-    async def read_automation(
+    async def read_automation(
+        self, automation_id: Union[UUID, str]
+    ) -> Optional[Automation]:
         response = await self._client.get(f"/automations/{automation_id}")
         if response.status_code == 404:
             return None
         response.raise_for_status()
         return Automation.model_validate(response.json())
 
-    async def read_automations_by_name(self, name: str) ->
+    async def read_automations_by_name(self, name: str) -> list[Automation]:
         """
         Query the Prefect API for an automation by name. Only automations matching the provided name will be returned.
 
```
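The `find_automation` change above threads a `name` through both branches so the exact and case-insensitive comparisons later in the method always have a string to match against. A standalone sketch of that resolution step (the helper name is illustrative, not part of the client):

```python
from typing import Optional, Union
from uuid import UUID


def resolve_id_and_name(id_or_name: Union[str, UUID]) -> tuple[Optional[UUID], str]:
    """Mirror the branching added to find_automation."""
    if isinstance(id_or_name, str):
        name = id_or_name
        try:
            id_ = UUID(id_or_name)  # a string may still be a valid UUID
        except ValueError:
            id_ = None
    else:
        id_ = id_or_name
        name = str(id_)  # keep a name candidate for the fallback lookups
    return id_, name


assert resolve_id_and_name("my-automation") == (None, "my-automation")
```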
```diff
@@ -3356,7 +3407,9 @@ class PrefectClient:
         Returns:
             a list of Automation model representations of the automations
         """
-        automation_filter = filters.AutomationFilter(
+        automation_filter = filters.AutomationFilter(
+            name=filters.AutomationFilterName(any_=[name])
+        )
 
         response = await self._client.post(
             "/automations/filter",
@@ -3370,21 +3423,21 @@ class PrefectClient:
 
         response.raise_for_status()
 
-        return pydantic.TypeAdapter(
+        return pydantic.TypeAdapter(list[Automation]).validate_python(response.json())
 
-    async def pause_automation(self, automation_id: UUID):
+    async def pause_automation(self, automation_id: UUID) -> None:
         response = await self._client.patch(
             f"/automations/{automation_id}", json={"enabled": False}
         )
         response.raise_for_status()
 
-    async def resume_automation(self, automation_id: UUID):
+    async def resume_automation(self, automation_id: UUID) -> None:
         response = await self._client.patch(
             f"/automations/{automation_id}", json={"enabled": True}
         )
         response.raise_for_status()
 
-    async def delete_automation(self, automation_id: UUID):
+    async def delete_automation(self, automation_id: UUID) -> None:
         response = await self._client.delete(f"/automations/{automation_id}")
         if response.status_code == 404:
             return
@@ -3393,12 +3446,12 @@ class PrefectClient:
 
     async def read_resource_related_automations(
         self, resource_id: str
-    ) ->
+    ) -> list[Automation]:
         response = await self._client.get(f"/automations/related-to/{resource_id}")
         response.raise_for_status()
-        return pydantic.TypeAdapter(
+        return pydantic.TypeAdapter(list[Automation]).validate_python(response.json())
 
-    async def delete_resource_owned_automations(self, resource_id: str):
+    async def delete_resource_owned_automations(self, resource_id: str) -> None:
         await self._client.delete(f"/automations/owned-by/{resource_id}")
 
     async def api_version(self) -> str:
@@ -3408,7 +3461,7 @@ class PrefectClient:
     def client_version(self) -> str:
         return prefect.__version__
 
-    async def raise_for_api_version_mismatch(self):
+    async def raise_for_api_version_mismatch(self) -> None:
         # Cloud is always compatible as a server
         if self.server_type == ServerType.CLOUD:
             return
@@ -3427,7 +3480,19 @@ class PrefectClient:
                 f"Major versions must match."
             )
 
-    async def
+    async def update_flow_run_labels(
+        self, flow_run_id: UUID, labels: KeyValueLabelsField
+    ) -> None:
+        """
+        Updates the labels of a flow run.
+        """
+
+        response = await self._client.patch(
+            f"/flow_runs/{flow_run_id}/labels", json=labels
+        )
+        response.raise_for_status()
+
+    async def __aenter__(self) -> Self:
         """
         Start the client.
 
```
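Both clients gain an `update_flow_run_labels` method in this release (the synchronous counterpart appears at the end of this file). A hypothetical call through the async client; `get_client` is the usual factory, and passing a plain dict where `KeyValueLabelsField` is expected is an assumption made for illustration:

```python
import asyncio
from uuid import UUID

from prefect.client.orchestration import get_client


async def label_run(flow_run_id: UUID) -> None:
    # Issues PATCH /flow_runs/{id}/labels with a mapping of label keys to values
    async with get_client() as client:
        await client.update_flow_run_labels(
            flow_run_id=flow_run_id,
            labels={"team": "data-eng", "env": "staging"},
        )


# asyncio.run(label_run(UUID("00000000-0000-0000-0000-000000000000")))  # placeholder id
```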
```diff
@@ -3474,7 +3539,7 @@ class PrefectClient:
 
         return self
 
-    async def __aexit__(self, *exc_info):
+    async def __aexit__(self, *exc_info: Any) -> Optional[bool]:
         """
         Shutdown the client.
         """
@@ -3485,13 +3550,13 @@ class PrefectClient:
         self._closed = True
         return await self._exit_stack.__aexit__(*exc_info)
 
-    def __enter__(self):
+    def __enter__(self) -> NoReturn:
         raise RuntimeError(
             "The `PrefectClient` must be entered with an async context. Use 'async "
             "with PrefectClient(...)' not 'with PrefectClient(...)'"
         )
 
-    def __exit__(self, *_):
+    def __exit__(self, *_: object) -> NoReturn:
         assert False, "This should never be called but must be defined for __enter__"
 
 
@@ -3527,19 +3592,25 @@ class SyncPrefectClient:
         *,
         api_key: Optional[str] = None,
         api_version: Optional[str] = None,
-        httpx_settings: Optional[
+        httpx_settings: Optional[dict[str, Any]] = None,
         server_type: Optional[ServerType] = None,
     ) -> None:
         httpx_settings = httpx_settings.copy() if httpx_settings else {}
         httpx_settings.setdefault("headers", {})
 
         if PREFECT_API_TLS_INSECURE_SKIP_VERIFY:
-
+            # Create an unverified context for insecure connections
+            ctx = ssl.create_default_context()
+            ctx.check_hostname = False
+            ctx.verify_mode = ssl.CERT_NONE
+            httpx_settings.setdefault("verify", ctx)
         else:
             cert_file = PREFECT_API_SSL_CERT_FILE.value()
             if not cert_file:
                 cert_file = certifi.where()
-
+            # Create a verified context with the certificate file
+            ctx = ssl.create_default_context(cafile=cert_file)
+            httpx_settings.setdefault("verify", ctx)
 
         if api_version is None:
             api_version = SERVER_API_VERSION
```
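The constructor change above swaps boolean or path-based `verify` values for explicit `ssl.SSLContext` objects handed to httpx. A self-contained sketch of the same idea outside the client; the `insecure` flag stands in for the `PREFECT_API_TLS_INSECURE_SKIP_VERIFY` setting:

```python
import ssl

import certifi
import httpx

insecure = False  # stand-in for the PREFECT_API_TLS_INSECURE_SKIP_VERIFY setting

if insecure:
    # Unverified context: no hostname check, no certificate validation
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
else:
    # Verified context seeded with certifi's CA bundle (or a custom cert file)
    ctx = ssl.create_default_context(cafile=certifi.where())

# httpx accepts an SSLContext directly for `verify`
client = httpx.Client(verify=ctx)
```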
```diff
@@ -3597,16 +3668,10 @@ class SyncPrefectClient:
             )
 
         # Connect to an in-process application
-
+        else:
             self._ephemeral_app = api
             self.server_type = ServerType.EPHEMERAL
 
-        else:
-            raise TypeError(
-                f"Unexpected type {type(api).__name__!r} for argument `api`. Expected"
-                " 'str' or 'ASGIApp/FastAPI'"
-            )
-
         # See https://www.python-httpx.org/advanced/#timeout-configuration
         httpx_settings.setdefault(
             "timeout",
@@ -3649,9 +3714,9 @@ class SyncPrefectClient:
         if isinstance(server_transport, httpx.HTTPTransport):
             pool = getattr(server_transport, "_pool", None)
             if isinstance(pool, httpcore.ConnectionPool):
-                pool
+                setattr(pool, "_retries", 3)
 
-        self.logger = get_logger("client")
+        self.logger: Logger = get_logger("client")
 
     @property
     def api_url(self) -> httpx.URL:
@@ -3689,7 +3754,7 @@ class SyncPrefectClient:
 
         return self
 
-    def __exit__(self, *exc_info) -> None:
+    def __exit__(self, *exc_info: Any) -> None:
         """
         Shutdown the client.
         """
@@ -3727,7 +3792,7 @@ class SyncPrefectClient:
     def client_version(self) -> str:
         return prefect.__version__
 
-    def raise_for_api_version_mismatch(self):
+    def raise_for_api_version_mismatch(self) -> None:
         # Cloud is always compatible as a server
         if self.server_type == ServerType.CLOUD:
             return
@@ -3746,7 +3811,7 @@ class SyncPrefectClient:
                 f"Major versions must match."
             )
 
-    def create_flow(self, flow: "FlowObject") -> UUID:
+    def create_flow(self, flow: "FlowObject[Any, Any]") -> UUID:
         """
         Create a flow in the Prefect API.
 
@@ -3786,13 +3851,13 @@ class SyncPrefectClient:
 
     def create_flow_run(
         self,
-        flow: "FlowObject",
+        flow: "FlowObject[Any, R]",
         name: Optional[str] = None,
-        parameters: Optional[
-        context: Optional[
+        parameters: Optional[dict[str, Any]] = None,
+        context: Optional[dict[str, Any]] = None,
         tags: Optional[Iterable[str]] = None,
         parent_task_run_id: Optional[UUID] = None,
-        state: Optional["prefect.states.State"] = None,
+        state: Optional["prefect.states.State[R]"] = None,
     ) -> FlowRun:
         """
         Create a flow run for a flow.
@@ -3834,7 +3899,7 @@ class SyncPrefectClient:
             state=state.to_state_create(),
             empirical_policy=FlowRunPolicy(
                 retries=flow.retries,
-                retry_delay=flow.retry_delay_seconds,
+                retry_delay=int(flow.retry_delay_seconds or 0),
             ),
         )
 
@@ -3852,12 +3917,12 @@ class SyncPrefectClient:
         self,
         flow_run_id: UUID,
         flow_version: Optional[str] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         name: Optional[str] = None,
         tags: Optional[Iterable[str]] = None,
         empirical_policy: Optional[FlowRunPolicy] = None,
         infrastructure_pid: Optional[str] = None,
-        job_variables: Optional[dict] = None,
+        job_variables: Optional[dict[str, Any]] = None,
     ) -> httpx.Response:
         """
         Update a flow run's details.
@@ -3878,7 +3943,7 @@ class SyncPrefectClient:
         Returns:
             an `httpx.Response` object from the PATCH request
         """
-        params = {}
+        params: dict[str, Any] = {}
         if flow_version is not None:
             params["flow_version"] = flow_version
         if parameters is not None:
@@ -3934,7 +3999,7 @@ class SyncPrefectClient:
         sort: Optional[FlowRunSort] = None,
         limit: Optional[int] = None,
         offset: int = 0,
-    ) ->
+    ) -> list[FlowRun]:
         """
         Query the Prefect API for flow runs. Only flow runs matching all criteria will
         be returned.
@@ -3954,7 +4019,7 @@ class SyncPrefectClient:
             a list of Flow Run model representations
                 of the flow runs
         """
-        body = {
+        body: dict[str, Any] = {
             "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
             "flow_runs": (
                 flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -3979,14 +4044,14 @@ class SyncPrefectClient:
         }
 
         response = self._client.post("/flow_runs/filter", json=body)
-        return pydantic.TypeAdapter(
+        return pydantic.TypeAdapter(list[FlowRun]).validate_python(response.json())
 
     def set_flow_run_state(
         self,
         flow_run_id: UUID,
-        state: "prefect.states.State",
+        state: "prefect.states.State[T]",
         force: bool = False,
-    ) -> OrchestrationResult:
+    ) -> OrchestrationResult[T]:
         """
         Set the state of a flow run.
 
@@ -4016,16 +4081,19 @@ class SyncPrefectClient:
             else:
                 raise
 
-
+        result: OrchestrationResult[T] = OrchestrationResult.model_validate(
+            response.json()
+        )
+        return result
 
-    def set_flow_run_name(self, flow_run_id: UUID, name: str):
-        flow_run_data =
+    def set_flow_run_name(self, flow_run_id: UUID, name: str) -> httpx.Response:
+        flow_run_data = FlowRunUpdate(name=name)
         return self._client.patch(
             f"/flow_runs/{flow_run_id}",
             json=flow_run_data.model_dump(mode="json", exclude_unset=True),
         )
 
-    def set_task_run_name(self, task_run_id: UUID, name: str):
+    def set_task_run_name(self, task_run_id: UUID, name: str) -> httpx.Response:
         task_run_data = TaskRunUpdate(name=name)
         return self._client.patch(
             f"/task_runs/{task_run_id}",
```
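The `set_flow_run_state` hunk above (and `set_task_run_state` further down) binds the `model_validate` output to an annotated name before returning it, which pins the generic parameter (`OrchestrationResult[T]`) for type checkers. A sketch of that pattern with an illustrative generic model, not the real `OrchestrationResult`:

```python
from typing import Generic, TypeVar

from pydantic import BaseModel

T = TypeVar("T")


class Envelope(BaseModel, Generic[T]):
    # Illustrative stand-in for a generic response wrapper
    value: T


payload = {"value": 42}

# Binding to an annotated name makes the intended parametrization explicit
result: Envelope[int] = Envelope.model_validate(payload)
assert result.value == 42
```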
```diff
@@ -4042,9 +4110,9 @@ class SyncPrefectClient:
         extra_tags: Optional[Iterable[str]] = None,
         state: Optional[prefect.states.State[R]] = None,
         task_inputs: Optional[
-
+            dict[
                 str,
-
+                list[
                     Union[
                         TaskRunResult,
                         Parameter,
@@ -4078,6 +4146,12 @@ class SyncPrefectClient:
         if state is None:
             state = prefect.states.Pending()
 
+        retry_delay = task.retry_delay_seconds
+        if isinstance(retry_delay, list):
+            retry_delay = [int(rd) for rd in retry_delay]
+        elif isinstance(retry_delay, float):
+            retry_delay = int(retry_delay)
+
         task_run_data = TaskRunCreate(
             id=id,
             name=name,
```
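The block added before `TaskRunCreate` above narrows float retry delays (or lists of them) to ints before they reach `TaskRunPolicy`. The same logic as a standalone function, with an illustrative name:

```python
from typing import Union


def coerce_retry_delay(
    retry_delay: Union[int, float, list[float], None],
) -> Union[int, list[int], None]:
    # Lists are converted element-wise; a bare float is truncated to int;
    # ints and None pass through unchanged.
    if isinstance(retry_delay, list):
        return [int(rd) for rd in retry_delay]
    if isinstance(retry_delay, float):
        return int(retry_delay)
    return retry_delay


assert coerce_retry_delay(2.9) == 2
assert coerce_retry_delay([0.5, 1.5]) == [0, 1]
assert coerce_retry_delay(None) is None
```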
```diff
@@ -4088,7 +4162,7 @@ class SyncPrefectClient:
             task_version=task.version,
             empirical_policy=TaskRunPolicy(
                 retries=task.retries,
-                retry_delay=
+                retry_delay=retry_delay,
                 retry_jitter_factor=task.retry_jitter_factor,
             ),
             state=state.to_state_create(),
@@ -4122,14 +4196,14 @@ class SyncPrefectClient:
     def read_task_runs(
         self,
         *,
-        flow_filter: FlowFilter = None,
-        flow_run_filter: FlowRunFilter = None,
-        task_run_filter: TaskRunFilter = None,
-        deployment_filter: DeploymentFilter = None,
-        sort: TaskRunSort = None,
+        flow_filter: Optional[FlowFilter] = None,
+        flow_run_filter: Optional[FlowRunFilter] = None,
+        task_run_filter: Optional[TaskRunFilter] = None,
+        deployment_filter: Optional[DeploymentFilter] = None,
+        sort: Optional[TaskRunSort] = None,
         limit: Optional[int] = None,
         offset: int = 0,
-    ) ->
+    ) -> list[TaskRun]:
         """
         Query the Prefect API for task runs. Only task runs matching all criteria will
         be returned.
@@ -4147,7 +4221,7 @@ class SyncPrefectClient:
             a list of Task Run model representations
                 of the task runs
         """
-        body = {
+        body: dict[str, Any] = {
             "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
             "flow_runs": (
                 flow_run_filter.model_dump(mode="json", exclude_unset=True)
@@ -4165,14 +4239,14 @@ class SyncPrefectClient:
             "offset": offset,
         }
         response = self._client.post("/task_runs/filter", json=body)
-        return pydantic.TypeAdapter(
+        return pydantic.TypeAdapter(list[TaskRun]).validate_python(response.json())
 
     def set_task_run_state(
         self,
         task_run_id: UUID,
-        state: prefect.states.State,
+        state: prefect.states.State[Any],
         force: bool = False,
-    ) -> OrchestrationResult:
+    ) -> OrchestrationResult[Any]:
         """
         Set the state of a task run.
 
@@ -4191,9 +4265,12 @@ class SyncPrefectClient:
             f"/task_runs/{task_run_id}/set_state",
             json=dict(state=state_create.model_dump(mode="json"), force=force),
         )
-
+        result: OrchestrationResult[Any] = OrchestrationResult.model_validate(
+            response.json()
+        )
+        return result
 
-    def read_task_run_states(self, task_run_id: UUID) ->
+    def read_task_run_states(self, task_run_id: UUID) -> list[prefect.states.State]:
         """
         Query for the states of a task run
 
@@ -4206,7 +4283,7 @@ class SyncPrefectClient:
         response = self._client.get(
             "/task_run_states/", params=dict(task_run_id=str(task_run_id))
         )
-        return pydantic.TypeAdapter(
+        return pydantic.TypeAdapter(list[prefect.states.State]).validate_python(
             response.json()
         )
 
@@ -4280,7 +4357,7 @@ class SyncPrefectClient:
         return Artifact.model_validate(response.json())
 
     def release_concurrency_slots(
-        self, names:
+        self, names: list[str], slots: int, occupancy_seconds: float
    ) -> httpx.Response:
         """
         Release concurrency slots for the specified limits.
@@ -4304,7 +4381,7 @@ class SyncPrefectClient:
         )
 
     def decrement_v1_concurrency_slots(
-        self, names:
+        self, names: list[str], occupancy_seconds: float, task_run_id: UUID
     ) -> httpx.Response:
         """
         Release the specified concurrency limits.
@@ -4326,3 +4403,15 @@ class SyncPrefectClient:
                 "task_run_id": str(task_run_id),
             },
         )
+
+    def update_flow_run_labels(
+        self, flow_run_id: UUID, labels: KeyValueLabelsField
+    ) -> None:
+        """
+        Updates the labels of a flow run.
+        """
+        response = self._client.patch(
+            f"/flow_runs/{flow_run_id}/labels",
+            json=labels,
+        )
+        response.raise_for_status()
```