prefect-client 3.1.11__py3-none-any.whl → 3.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_experimental/sla/__init__.py +0 -0
- prefect/_experimental/sla/client.py +66 -0
- prefect/_experimental/sla/objects.py +53 -0
- prefect/_version.py +3 -3
- prefect/automations.py +236 -30
- prefect/blocks/__init__.py +3 -3
- prefect/blocks/abstract.py +53 -30
- prefect/blocks/core.py +181 -82
- prefect/blocks/notifications.py +133 -73
- prefect/blocks/redis.py +13 -9
- prefect/blocks/system.py +24 -11
- prefect/blocks/webhook.py +7 -5
- prefect/cache_policies.py +3 -2
- prefect/client/orchestration/__init__.py +103 -2006
- prefect/client/orchestration/_automations/__init__.py +0 -0
- prefect/client/orchestration/_automations/client.py +329 -0
- prefect/client/orchestration/_blocks_documents/__init__.py +0 -0
- prefect/client/orchestration/_blocks_documents/client.py +334 -0
- prefect/client/orchestration/_blocks_schemas/__init__.py +0 -0
- prefect/client/orchestration/_blocks_schemas/client.py +200 -0
- prefect/client/orchestration/_blocks_types/__init__.py +0 -0
- prefect/client/orchestration/_blocks_types/client.py +380 -0
- prefect/client/orchestration/_deployments/__init__.py +0 -0
- prefect/client/orchestration/_deployments/client.py +1128 -0
- prefect/client/orchestration/_flow_runs/__init__.py +0 -0
- prefect/client/orchestration/_flow_runs/client.py +903 -0
- prefect/client/orchestration/_flows/__init__.py +0 -0
- prefect/client/orchestration/_flows/client.py +343 -0
- prefect/client/orchestration/_logs/client.py +16 -14
- prefect/client/schemas/__init__.py +68 -28
- prefect/client/schemas/objects.py +5 -5
- prefect/context.py +15 -1
- prefect/deployments/base.py +6 -0
- prefect/deployments/runner.py +42 -1
- prefect/engine.py +17 -4
- prefect/filesystems.py +6 -2
- prefect/flow_engine.py +47 -38
- prefect/flows.py +10 -1
- prefect/logging/logging.yml +1 -1
- prefect/runner/runner.py +4 -2
- prefect/settings/models/cloud.py +5 -0
- prefect/settings/models/experiments.py +0 -5
- prefect/states.py +57 -38
- prefect/task_runners.py +56 -55
- prefect/task_worker.py +2 -2
- prefect/tasks.py +6 -4
- prefect/telemetry/bootstrap.py +10 -9
- prefect/telemetry/services.py +4 -0
- prefect/utilities/templating.py +25 -1
- prefect/workers/base.py +6 -3
- prefect/workers/process.py +1 -1
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/METADATA +2 -2
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/RECORD +56 -39
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/LICENSE +0 -0
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/WHEEL +0 -0
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/top_level.txt +0 -0
prefect/client/orchestration/_flows/client.py
ADDED
@@ -0,0 +1,343 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from httpx import HTTPStatusError, RequestError
+
+from prefect.client.orchestration.base import BaseAsyncClient, BaseClient
+from prefect.exceptions import ObjectNotFound
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from prefect.client.schemas.filters import (
+        DeploymentFilter,
+        FlowFilter,
+        FlowRunFilter,
+        TaskRunFilter,
+        WorkPoolFilter,
+        WorkQueueFilter,
+    )
+    from prefect.client.schemas.objects import (
+        Flow,
+    )
+    from prefect.client.schemas.sorting import FlowSort
+    from prefect.flows import Flow as FlowObject
+
+
+class FlowClient(BaseClient):
+    def create_flow(self, flow: "FlowObject[Any, Any]") -> "UUID":
+        """
+        Create a flow in the Prefect API.
+
+        Args:
+            flow: a [Flow][prefect.flows.Flow] object
+
+        Raises:
+            httpx.RequestError: if a flow was not created for any reason
+
+        Returns:
+            the ID of the flow in the backend
+        """
+        return self.create_flow_from_name(flow.name)
+
+    def create_flow_from_name(self, flow_name: str) -> "UUID":
+        """
+        Create a flow in the Prefect API.
+
+        Args:
+            flow_name: the name of the new flow
+
+        Raises:
+            httpx.RequestError: if a flow was not created for any reason
+
+        Returns:
+            the ID of the flow in the backend
+        """
+        from prefect.client.schemas.actions import FlowCreate
+
+        flow_data = FlowCreate(name=flow_name)
+        response = self.request(
+            "POST", "/flows/", json=flow_data.model_dump(mode="json")
+        )
+
+        flow_id = response.json().get("id")
+        if not flow_id:
+            raise RequestError(f"Malformed response: {response}")
+
+        # Return the id of the created flow
+        from uuid import UUID
+
+        return UUID(flow_id)
+
+    def read_flow(self, flow_id: "UUID") -> "Flow":
+        """
+        Query the Prefect API for a flow by id.
+
+        Args:
+            flow_id: the flow ID of interest
+
+        Returns:
+            a [Flow model][prefect.client.schemas.objects.Flow] representation of the flow
+        """
+        response = self.request("GET", "/flows/{id}", path_params={"id": flow_id})
+        from prefect.client.schemas.objects import Flow
+
+        return Flow.model_validate(response.json())
+
+    def delete_flow(self, flow_id: "UUID") -> None:
+        """
+        Delete a flow by UUID.
+
+        Args:
+            flow_id: ID of the flow to be deleted
+        Raises:
+            prefect.exceptions.ObjectNotFound: If request returns 404
+            httpx.RequestError: If requests fail
+        """
+        try:
+            self.request("DELETE", "/flows/{id}", path_params={"id": flow_id})
+        except HTTPStatusError as e:
+            if e.response.status_code == 404:
+                raise ObjectNotFound(http_exc=e) from e
+            else:
+                raise
+
+    def read_flows(
+        self,
+        *,
+        flow_filter: "FlowFilter | None" = None,
+        flow_run_filter: "FlowRunFilter | None" = None,
+        task_run_filter: "TaskRunFilter | None" = None,
+        deployment_filter: "DeploymentFilter | None" = None,
+        work_pool_filter: "WorkPoolFilter | None" = None,
+        work_queue_filter: "WorkQueueFilter | None" = None,
+        sort: "FlowSort | None" = None,
+        limit: int | None = None,
+        offset: int = 0,
+    ) -> list["Flow"]:
+        """
+        Query the Prefect API for flows. Only flows matching all criteria will
+        be returned.
+
+        Args:
+            flow_filter: filter criteria for flows
+            flow_run_filter: filter criteria for flow runs
+            task_run_filter: filter criteria for task runs
+            deployment_filter: filter criteria for deployments
+            work_pool_filter: filter criteria for work pools
+            work_queue_filter: filter criteria for work pool queues
+            sort: sort criteria for the flows
+            limit: limit for the flow query
+            offset: offset for the flow query
+
+        Returns:
+            a list of Flow model representations of the flows
+        """
+        body: dict[str, Any] = {
+            "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
+            "flow_runs": (
+                flow_run_filter.model_dump(mode="json", exclude_unset=True)
+                if flow_run_filter
+                else None
+            ),
+            "task_runs": (
+                task_run_filter.model_dump(mode="json") if task_run_filter else None
+            ),
+            "deployments": (
+                deployment_filter.model_dump(mode="json") if deployment_filter else None
+            ),
+            "work_pools": (
+                work_pool_filter.model_dump(mode="json") if work_pool_filter else None
+            ),
+            "work_queues": (
+                work_queue_filter.model_dump(mode="json") if work_queue_filter else None
+            ),
+            "sort": sort,
+            "limit": limit,
+            "offset": offset,
+        }
+
+        response = self.request("POST", "/flows/filter", json=body)
+        from prefect.client.schemas.objects import Flow
+
+        return Flow.model_validate_list(response.json())
+
+    def read_flow_by_name(
+        self,
+        flow_name: str,
+    ) -> "Flow":
+        """
+        Query the Prefect API for a flow by name.
+
+        Args:
+            flow_name: the name of a flow
+
+        Returns:
+            a fully hydrated Flow model
+        """
+        response = self.request(
+            "GET", "/flows/name/{name}", path_params={"name": flow_name}
+        )
+        from prefect.client.schemas.objects import Flow
+
+        return Flow.model_validate(response.json())
+
+
+class FlowAsyncClient(BaseAsyncClient):
+    async def create_flow(self, flow: "FlowObject[Any, Any]") -> "UUID":
+        """
+        Create a flow in the Prefect API.
+
+        Args:
+            flow: a [Flow][prefect.flows.Flow] object
+
+        Raises:
+            httpx.RequestError: if a flow was not created for any reason
+
+        Returns:
+            the ID of the flow in the backend
+        """
+        return await self.create_flow_from_name(flow.name)
+
+    async def create_flow_from_name(self, flow_name: str) -> "UUID":
+        """
+        Create a flow in the Prefect API.
+
+        Args:
+            flow_name: the name of the new flow
+
+        Raises:
+            httpx.RequestError: if a flow was not created for any reason
+
+        Returns:
+            the ID of the flow in the backend
+        """
+        from prefect.client.schemas.actions import FlowCreate
+
+        flow_data = FlowCreate(name=flow_name)
+        response = await self.request(
+            "POST", "/flows/", json=flow_data.model_dump(mode="json")
+        )
+
+        flow_id = response.json().get("id")
+        if not flow_id:
+            raise RequestError(f"Malformed response: {response}")
+
+        # Return the id of the created flow
+        from uuid import UUID
+
+        return UUID(flow_id)
+
+    async def read_flow(self, flow_id: "UUID") -> "Flow":
+        """
+        Query the Prefect API for a flow by id.
+
+        Args:
+            flow_id: the flow ID of interest
+
+        Returns:
+            a [Flow model][prefect.client.schemas.objects.Flow] representation of the flow
+        """
+        response = await self.request("GET", "/flows/{id}", path_params={"id": flow_id})
+        from prefect.client.schemas.objects import Flow
+
+        return Flow.model_validate(response.json())
+
+    async def delete_flow(self, flow_id: "UUID") -> None:
+        """
+        Delete a flow by UUID.
+
+        Args:
+            flow_id: ID of the flow to be deleted
+        Raises:
+            prefect.exceptions.ObjectNotFound: If request returns 404
+            httpx.RequestError: If requests fail
+        """
+        try:
+            await self.request("DELETE", "/flows/{id}", path_params={"id": flow_id})
+        except HTTPStatusError as e:
+            if e.response.status_code == 404:
+                raise ObjectNotFound(http_exc=e) from e
+            else:
+                raise
+
+    async def read_flows(
+        self,
+        *,
+        flow_filter: "FlowFilter | None" = None,
+        flow_run_filter: "FlowRunFilter | None" = None,
+        task_run_filter: "TaskRunFilter | None" = None,
+        deployment_filter: "DeploymentFilter | None" = None,
+        work_pool_filter: "WorkPoolFilter | None" = None,
+        work_queue_filter: "WorkQueueFilter | None" = None,
+        sort: "FlowSort | None" = None,
+        limit: int | None = None,
+        offset: int = 0,
+    ) -> list["Flow"]:
+        """
+        Query the Prefect API for flows. Only flows matching all criteria will
+        be returned.
+
+        Args:
+            flow_filter: filter criteria for flows
+            flow_run_filter: filter criteria for flow runs
+            task_run_filter: filter criteria for task runs
+            deployment_filter: filter criteria for deployments
+            work_pool_filter: filter criteria for work pools
+            work_queue_filter: filter criteria for work pool queues
+            sort: sort criteria for the flows
+            limit: limit for the flow query
+            offset: offset for the flow query
+
+        Returns:
+            a list of Flow model representations of the flows
+        """
+        body: dict[str, Any] = {
+            "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
+            "flow_runs": (
+                flow_run_filter.model_dump(mode="json", exclude_unset=True)
+                if flow_run_filter
+                else None
+            ),
+            "task_runs": (
+                task_run_filter.model_dump(mode="json") if task_run_filter else None
+            ),
+            "deployments": (
+                deployment_filter.model_dump(mode="json") if deployment_filter else None
+            ),
+            "work_pools": (
+                work_pool_filter.model_dump(mode="json") if work_pool_filter else None
+            ),
+            "work_queues": (
+                work_queue_filter.model_dump(mode="json") if work_queue_filter else None
+            ),
+            "sort": sort,
+            "limit": limit,
+            "offset": offset,
+        }
+
+        response = await self.request("POST", "/flows/filter", json=body)
+        from prefect.client.schemas.objects import Flow
+
+        return Flow.model_validate_list(response.json())
+
+    async def read_flow_by_name(
+        self,
+        flow_name: str,
+    ) -> "Flow":
+        """
+        Query the Prefect API for a flow by name.
+
+        Args:
+            flow_name: the name of a flow
+
+        Returns:
+            a fully hydrated Flow model
+        """
+        response = await self.request(
+            "GET", "/flows/name/{name}", path_params={"name": flow_name}
+        )
+        from prefect.client.schemas.objects import Flow
+
+        return Flow.model_validate(response.json())
prefect/client/orchestration/_logs/client.py
CHANGED
@@ -1,11 +1,8 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Iterable,
+from typing import TYPE_CHECKING, Any, Iterable, Union
 
 from prefect.client.orchestration.base import BaseAsyncClient, BaseClient
-from prefect.client.schemas.sorting import (
-    LogSort,
-)
 
 if TYPE_CHECKING:
     from prefect.client.schemas.actions import (
@@ -17,6 +14,7 @@ if TYPE_CHECKING:
     from prefect.client.schemas.objects import (
         Log,
     )
+    from prefect.client.schemas.sorting import LogSort
 
 
 class LogClient(BaseClient):
@@ -34,19 +32,21 @@ class LogClient(BaseClient):
 
     def read_logs(
         self,
-        log_filter:
-        limit:
-        offset:
-        sort: "LogSort" =
+        log_filter: "LogFilter | None" = None,
+        limit: int | None = None,
+        offset: int | None = None,
+        sort: "LogSort | None" = None,
     ) -> list["Log"]:
         """
         Read flow and task run logs.
         """
+        from prefect.client.schemas.sorting import LogSort
+
         body: dict[str, Any] = {
             "logs": log_filter.model_dump(mode="json") if log_filter else None,
             "limit": limit,
             "offset": offset,
-            "sort": sort,
+            "sort": sort or LogSort.TIMESTAMP_ASC,
         }
         response = self.request("POST", "/logs/filter", json=body)
         from prefect.client.schemas.objects import Log
@@ -74,19 +74,21 @@ class LogAsyncClient(BaseAsyncClient):
 
     async def read_logs(
        self,
-        log_filter:
-        limit:
-        offset:
-        sort: "LogSort" =
+        log_filter: "LogFilter | None" = None,
+        limit: int | None = None,
+        offset: int | None = None,
+        sort: "LogSort | None" = None,
     ) -> list[Log]:
         """
         Read flow and task run logs.
         """
+        from prefect.client.schemas.sorting import LogSort
+
         body: dict[str, Any] = {
             "logs": log_filter.model_dump(mode="json") if log_filter else None,
             "limit": limit,
             "offset": offset,
-            "sort": sort,
+            "sort": sort or LogSort.TIMESTAMP_ASC,
        }
 
         response = await self.request("POST", "/logs/filter", json=body)
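The notable change here is that sort now defaults to None and falls back to LogSort.TIMESTAMP_ASC inside the method, with LogSort imported lazily instead of at module import time. A usage sketch, assuming read_logs stays exposed on the client from prefect.get_client(); the flow run ID is a placeholder:

import asyncio
from uuid import UUID

from prefect import get_client
from prefect.client.schemas.filters import LogFilter, LogFilterFlowRunId


async def print_flow_run_logs(flow_run_id: UUID) -> None:
    async with get_client() as client:
        # Omitting `sort` relies on the new fallback to LogSort.TIMESTAMP_ASC.
        logs = await client.read_logs(
            log_filter=LogFilter(flow_run_id=LogFilterFlowRunId(any_=[flow_run_id])),
            limit=200,
        )
        for log in logs:
            print(log.timestamp, log.level, log.message)


if __name__ == "__main__":
    asyncio.run(print_flow_run_logs(UUID("00000000-0000-0000-0000-000000000000")))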
prefect/client/schemas/__init__.py
CHANGED
@@ -1,32 +1,58 @@
-
-
+import importlib
+import sys
+from typing import Any, TYPE_CHECKING
 
-
-from .
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+if TYPE_CHECKING:
+    from .actions import BlockTypeUpdate, StateCreate
+    from .objects import (
+        DEFAULT_BLOCK_SCHEMA_VERSION,
+        BlockDocument,
+        BlockSchema,
+        BlockType,
+        FlowRun,
+        FlowRunPolicy,
+        State,
+        StateDetails,
+        StateType,
+        TaskRun,
+        TaskRunInput,
+        TaskRunPolicy,
+        TaskRunResult,
+        Workspace,
+    )
+    from .responses import (
+        OrchestrationResult,
+        SetStateStatus,
+        StateAbortDetails,
+        StateAcceptDetails,
+        StateRejectDetails,
+    )
 
-
+_public_api = {
+    "BlockDocument": (__package__, ".objects"),
+    "BlockSchema": (__package__, ".objects"),
+    "BlockType": (__package__, ".objects"),
+    "BlockTypeUpdate": (__package__, ".actions"),
+    "DEFAULT_BLOCK_SCHEMA_VERSION": (__package__, ".objects"),
+    "FlowRun": (__package__, ".objects"),
+    "FlowRunPolicy": (__package__, ".objects"),
+    "OrchestrationResult": (__package__, ".responses"),
+    "SetStateStatus": (__package__, ".responses"),
+    "State": (__package__, ".objects"),
+    "StateAbortDetails": (__package__, ".responses"),
+    "StateAcceptDetails": (__package__, ".responses"),
+    "StateCreate": (__package__, ".actions"),
+    "StateDetails": (__package__, ".objects"),
+    "StateRejectDetails": (__package__, ".responses"),
+    "StateType": (__package__, ".objects"),
+    "TaskRun": (__package__, ".objects"),
+    "TaskRunInput": (__package__, ".objects"),
+    "TaskRunPolicy": (__package__, ".objects"),
+    "TaskRunResult": (__package__, ".objects"),
+    "Workspace": (__package__, ".objects"),
+}
+
+__all__ = [
     "BlockDocument",
     "BlockSchema",
     "BlockType",
@@ -48,4 +74,18 @@ __all__ = (
     "TaskRunPolicy",
     "TaskRunResult",
     "Workspace",
-
+]
+
+
+def __getattr__(attr_name: str) -> Any:
+    try:
+        if (dynamic_attr := _public_api.get(attr_name)) is None:
+            raise AttributeError(f"module {__name__} has no attribute {attr_name}")
+
+        package, mname = dynamic_attr
+        module = importlib.import_module(mname, package=package)
+        return getattr(module, attr_name)
+    except ModuleNotFoundError as ex:
+        mname, _, attr = (ex.name or "").rpartition(".")
+        ctx = {"name": mname, "obj": attr} if sys.version_info >= (3, 10) else {}
+        raise AttributeError(f"module {mname} has no attribute {attr}", **ctx) from ex
prefect/client/schemas/objects.py
CHANGED
@@ -121,7 +121,7 @@ class WorkPoolStatus(AutoEnum):
     PAUSED = AutoEnum.auto()
 
     @property
-    def display_name(self):
+    def display_name(self) -> str:
         return self.name.replace("_", " ").capitalize()
 
 
@@ -348,7 +348,7 @@ class State(ObjectBaseModel, Generic[R]):
         )
 
         if isinstance(self.data, ResultRecord) and should_persist_result():
-            data = self.data.metadata
+            data = self.data.metadata  # pyright: ignore[reportUnknownMemberType] unable to narrow ResultRecord type
         else:
             data = None
 
@@ -379,7 +379,7 @@ class State(ObjectBaseModel, Generic[R]):
 
     @model_validator(mode="after")
     def set_unpersisted_results_to_none(self) -> Self:
-        if isinstance(self.data, dict) and self.data.get("type") == "unpersisted":
+        if isinstance(self.data, dict) and self.data.get("type") == "unpersisted":  # pyright: ignore[reportUnknownMemberType] unable to narrow dict type
             self.data = None
         return self
 
@@ -524,7 +524,7 @@ class FlowRunPolicy(PrefectBaseModel):
     @classmethod
     def populate_deprecated_fields(cls, values: Any) -> Any:
         if isinstance(values, dict):
-            return set_run_policy_deprecated_fields(values)
+            return set_run_policy_deprecated_fields(values)  # pyright: ignore[reportUnknownVariableType, reportUnknownArgumentType] unable to narrow dict type
         return values
 
 
@@ -1255,7 +1255,7 @@ class BlockDocumentReference(ObjectBaseModel):
     @classmethod
     def validate_parent_and_ref_are_different(cls, values: Any) -> Any:
         if isinstance(values, dict):
-            return validate_parent_and_ref_diff(values)
+            return validate_parent_and_ref_diff(values)  # pyright: ignore[reportUnknownVariableType, reportUnknownArgumentType] unable to narrow dict type
         return values
 
 
prefect/context.py
CHANGED
@@ -52,7 +52,6 @@ def serialize_context() -> dict[str, Any]:
     """
     Serialize the current context for use in a remote execution environment.
     """
-
     flow_run_context = EngineContext.get()
     task_run_context = TaskRunContext.get()
     tags_context = TagsContext.get()
@@ -71,6 +70,21 @@ def hydrated_context(
     serialized_context: Optional[dict[str, Any]] = None,
     client: Union[PrefectClient, SyncPrefectClient, None] = None,
 ) -> Generator[None, Any, None]:
+    # We need to rebuild the models because we might be hydrating in a remote
+    # environment where the models are not available.
+    # TODO: Remove this once we have fixed our circular imports and we don't need to rebuild models any more.
+    from prefect.flows import Flow
+    from prefect.results import ResultRecordMetadata
+    from prefect.tasks import Task
+
+    _types: dict[str, Any] = dict(
+        Flow=Flow,
+        Task=Task,
+        ResultRecordMetadata=ResultRecordMetadata,
+    )
+    FlowRunContext.model_rebuild(_types_namespace=_types)
+    TaskRunContext.model_rebuild(_types_namespace=_types)
+
     with ExitStack() as stack:
         if serialized_context:
             # Set up settings context
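The addition makes hydrated_context() rebuild the FlowRunContext and TaskRunContext models before restoring anything, so hydration works in remote environments that have not yet imported prefect.flows or prefect.tasks. A rough sketch of the serialize-then-hydrate round trip, assuming the usual hand-off to a worker process (run_in_worker is illustrative):

from typing import Any

from prefect import flow
from prefect.context import hydrated_context, serialize_context


def run_in_worker(serialized: dict[str, Any]) -> None:
    # Imagine this runs in a separate process: the captured settings, tags,
    # and flow-run context are restored around the wrapped code.
    with hydrated_context(serialized):
        ...  # user code here sees the original run context


@flow
def my_flow() -> None:
    ctx = serialize_context()  # capture the current execution context
    run_in_worker(ctx)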
prefect/deployments/base.py
CHANGED
@@ -5,6 +5,8 @@ build system for managing flows and deployments.
 To get started, follow along with [the deloyments tutorial](/tutorials/deployments/).
 """
 
+from __future__ import annotations
+
 import os
 from copy import deepcopy
 from pathlib import Path
@@ -275,6 +277,7 @@ def _save_deployment_to_prefect_file(
     push_steps: Optional[List[Dict]] = None,
     pull_steps: Optional[List[Dict]] = None,
     triggers: Optional[List[Dict]] = None,
+    sla: Optional[list[dict]] = None,
     prefect_file: Path = Path("prefect.yaml"),
 ):
     """
@@ -319,6 +322,9 @@ def _save_deployment_to_prefect_file(
     if triggers and triggers != parsed_prefect_file_contents.get("triggers"):
         deployment["triggers"] = triggers
 
+    if sla and sla != parsed_prefect_file_contents.get("sla"):
+        deployment["sla"] = sla
+
     deployments = parsed_prefect_file_contents.get("deployments")
     if deployments is None:
         parsed_prefect_file_contents["deployments"] = [deployment]