prefect-client 3.1.9__py3-none-any.whl → 3.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_experimental/lineage.py +7 -8
- prefect/_internal/_logging.py +15 -3
- prefect/_internal/compatibility/async_dispatch.py +22 -16
- prefect/_internal/compatibility/deprecated.py +42 -18
- prefect/_internal/compatibility/migration.py +2 -2
- prefect/_internal/concurrency/inspection.py +12 -14
- prefect/_internal/concurrency/primitives.py +2 -2
- prefect/_internal/concurrency/services.py +154 -80
- prefect/_internal/concurrency/waiters.py +13 -9
- prefect/_internal/pydantic/annotations/pendulum.py +7 -7
- prefect/_internal/pytz.py +4 -3
- prefect/_internal/retries.py +10 -5
- prefect/_internal/schemas/bases.py +19 -10
- prefect/_internal/schemas/validators.py +227 -388
- prefect/_version.py +3 -3
- prefect/artifacts.py +61 -74
- prefect/automations.py +27 -7
- prefect/blocks/core.py +3 -3
- prefect/client/{orchestration.py → orchestration/__init__.py} +38 -701
- prefect/client/orchestration/_artifacts/__init__.py +0 -0
- prefect/client/orchestration/_artifacts/client.py +239 -0
- prefect/client/orchestration/_concurrency_limits/__init__.py +0 -0
- prefect/client/orchestration/_concurrency_limits/client.py +762 -0
- prefect/client/orchestration/_logs/__init__.py +0 -0
- prefect/client/orchestration/_logs/client.py +95 -0
- prefect/client/orchestration/_variables/__init__.py +0 -0
- prefect/client/orchestration/_variables/client.py +157 -0
- prefect/client/orchestration/base.py +46 -0
- prefect/client/orchestration/routes.py +145 -0
- prefect/client/schemas/actions.py +2 -2
- prefect/client/schemas/filters.py +5 -0
- prefect/client/schemas/objects.py +3 -10
- prefect/client/schemas/schedules.py +22 -10
- prefect/concurrency/_asyncio.py +87 -0
- prefect/concurrency/{events.py → _events.py} +10 -10
- prefect/concurrency/asyncio.py +20 -104
- prefect/concurrency/context.py +6 -4
- prefect/concurrency/services.py +26 -74
- prefect/concurrency/sync.py +23 -44
- prefect/concurrency/v1/_asyncio.py +63 -0
- prefect/concurrency/v1/{events.py → _events.py} +13 -15
- prefect/concurrency/v1/asyncio.py +27 -80
- prefect/concurrency/v1/context.py +6 -4
- prefect/concurrency/v1/services.py +33 -79
- prefect/concurrency/v1/sync.py +18 -37
- prefect/context.py +66 -70
- prefect/deployments/base.py +4 -144
- prefect/deployments/flow_runs.py +12 -2
- prefect/deployments/runner.py +11 -3
- prefect/deployments/steps/pull.py +13 -0
- prefect/events/clients.py +7 -1
- prefect/events/schemas/events.py +3 -2
- prefect/flow_engine.py +54 -47
- prefect/flows.py +2 -1
- prefect/futures.py +42 -27
- prefect/input/run_input.py +2 -1
- prefect/locking/filesystem.py +8 -7
- prefect/locking/memory.py +5 -3
- prefect/locking/protocol.py +1 -1
- prefect/main.py +1 -3
- prefect/plugins.py +12 -10
- prefect/results.py +3 -308
- prefect/runner/storage.py +87 -21
- prefect/serializers.py +32 -25
- prefect/settings/legacy.py +4 -4
- prefect/settings/models/api.py +3 -3
- prefect/settings/models/cli.py +3 -3
- prefect/settings/models/client.py +5 -3
- prefect/settings/models/cloud.py +3 -3
- prefect/settings/models/deployments.py +3 -3
- prefect/settings/models/experiments.py +4 -2
- prefect/settings/models/flows.py +3 -3
- prefect/settings/models/internal.py +4 -2
- prefect/settings/models/logging.py +4 -3
- prefect/settings/models/results.py +3 -3
- prefect/settings/models/root.py +3 -2
- prefect/settings/models/runner.py +4 -4
- prefect/settings/models/server/api.py +3 -3
- prefect/settings/models/server/database.py +11 -4
- prefect/settings/models/server/deployments.py +6 -2
- prefect/settings/models/server/ephemeral.py +4 -2
- prefect/settings/models/server/events.py +3 -2
- prefect/settings/models/server/flow_run_graph.py +6 -2
- prefect/settings/models/server/root.py +3 -3
- prefect/settings/models/server/services.py +26 -11
- prefect/settings/models/server/tasks.py +6 -3
- prefect/settings/models/server/ui.py +3 -3
- prefect/settings/models/tasks.py +5 -5
- prefect/settings/models/testing.py +3 -3
- prefect/settings/models/worker.py +5 -3
- prefect/settings/profiles.py +15 -2
- prefect/states.py +4 -7
- prefect/task_engine.py +54 -75
- prefect/tasks.py +84 -32
- prefect/telemetry/processors.py +6 -6
- prefect/telemetry/run_telemetry.py +13 -8
- prefect/telemetry/services.py +32 -31
- prefect/transactions.py +4 -15
- prefect/utilities/_git.py +34 -0
- prefect/utilities/asyncutils.py +1 -1
- prefect/utilities/engine.py +3 -19
- prefect/utilities/generics.py +18 -0
- prefect/workers/__init__.py +2 -0
- {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/METADATA +1 -1
- {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/RECORD +108 -99
- prefect/records/__init__.py +0 -1
- prefect/records/base.py +0 -235
- prefect/records/filesystem.py +0 -213
- prefect/records/memory.py +0 -184
- prefect/records/result_store.py +0 -70
- {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/LICENSE +0 -0
- {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/WHEEL +0 -0
- {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/top_level.txt +0 -0
prefect/context.py
CHANGED
@@ -9,21 +9,10 @@ For more user-accessible information about the current run, see [`prefect.runtim
 import os
 import sys
 import warnings
+from collections.abc import AsyncGenerator, Generator, Mapping
 from contextlib import ExitStack, asynccontextmanager, contextmanager
 from contextvars import ContextVar, Token
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AsyncGenerator,
-    Dict,
-    Generator,
-    Mapping,
-    Optional,
-    Set,
-    Type,
-    TypeVar,
-    Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, Union
 
 from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
 from typing_extensions import Self
@@ -58,13 +47,8 @@ if TYPE_CHECKING:
     from prefect.flows import Flow
     from prefect.tasks import Task
 
-# Define the global settings context variable
-# This will be populated downstream but must be null here to facilitate loading the
-# default settings.
-GLOBAL_SETTINGS_CONTEXT = None  # type: ignore
-
 
-def serialize_context() -> Dict[str, Any]:
+def serialize_context() -> dict[str, Any]:
     """
     Serialize the current context for use in a remote execution environment.
     """
@@ -84,9 +68,9 @@ def serialize_context() -> Dict[str, Any]:
 
 @contextmanager
 def hydrated_context(
-    serialized_context: Optional[Dict[str, Any]] = None,
+    serialized_context: Optional[dict[str, Any]] = None,
     client: Union[PrefectClient, SyncPrefectClient, None] = None,
-):
+) -> Generator[None, Any, None]:
     with ExitStack() as stack:
         if serialized_context:
             # Set up settings context
@@ -123,10 +107,15 @@ class ContextModel(BaseModel):
     a context manager
     """
 
+    if TYPE_CHECKING:
+        # subclasses can pass through keyword arguments to the pydantic base model
+        def __init__(self, **kwargs: Any) -> None:
+            ...
+
     # The context variable for storing data must be defined by the child class
-    __var__: ContextVar[Self]
+    __var__: ClassVar[ContextVar[Self]]
     _token: Optional[Token[Self]] = PrivateAttr(None)
-    model_config = ConfigDict(
+    model_config: ClassVar[ConfigDict] = ConfigDict(
         arbitrary_types_allowed=True,
         extra="forbid",
     )
@@ -139,7 +128,7 @@ class ContextModel(BaseModel):
         self._token = self.__var__.set(self)
         return self
 
-    def __exit__(self, *_):
+    def __exit__(self, *_: Any) -> None:
         if not self._token:
             raise RuntimeError(
                 "Asymmetric use of context. Context exit called without an enter."
@@ -148,13 +137,13 @@ class ContextModel(BaseModel):
         self._token = None
 
     @classmethod
-    def get(cls: Type[Self]) -> Optional[Self]:
+    def get(cls: type[Self]) -> Optional[Self]:
         """Get the current context instance"""
         return cls.__var__.get(None)
 
     def model_copy(
         self: Self, *, update: Optional[Mapping[str, Any]] = None, deep: bool = False
-    ):
+    ) -> Self:
         """
         Duplicate the context model, optionally choosing which fields to include, exclude, or change.
 
@@ -173,7 +162,7 @@ class ContextModel(BaseModel):
         new._token = None
         return new
 
-    def serialize(self, include_secrets: bool = True) -> Dict[str, Any]:
+    def serialize(self, include_secrets: bool = True) -> dict[str, Any]:
         """
         Serialize the context model to a dictionary that can be pickled with cloudpickle.
         """
@@ -202,19 +191,19 @@ class SyncClientContext(ContextModel):
        assert c1 is ctx.client
    """
 
-    __var__: ContextVar[Self] = ContextVar("sync-client-context")
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("sync-client-context")
     client: SyncPrefectClient
     _httpx_settings: Optional[dict[str, Any]] = PrivateAttr(None)
     _context_stack: int = PrivateAttr(0)
 
-    def __init__(self, httpx_settings: Optional[dict[str, Any]] = None):
+    def __init__(self, httpx_settings: Optional[dict[str, Any]] = None) -> None:
         super().__init__(
-            client=get_client(sync_client=True, httpx_settings=httpx_settings),
+            client=get_client(sync_client=True, httpx_settings=httpx_settings),
         )
         self._httpx_settings = httpx_settings
         self._context_stack = 0
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         self._context_stack += 1
         if self._context_stack == 1:
             self.client.__enter__()
@@ -223,20 +212,20 @@ class SyncClientContext(ContextModel):
         else:
             return self
 
-    def __exit__(self, *exc_info: Any):
+    def __exit__(self, *exc_info: Any) -> None:
         self._context_stack -= 1
         if self._context_stack == 0:
-            self.client.__exit__(*exc_info)
-            return super().__exit__(*exc_info)
+            self.client.__exit__(*exc_info)
+            return super().__exit__(*exc_info)
 
     @classmethod
     @contextmanager
-    def get_or_create(cls) -> Generator["SyncClientContext", None, None]:
-        ctx = SyncClientContext.get()
+    def get_or_create(cls) -> Generator[Self, None, None]:
+        ctx = cls.get()
         if ctx:
             yield ctx
         else:
-            with SyncClientContext() as ctx:
+            with cls() as ctx:
                 yield ctx
 
 
@@ -260,14 +249,14 @@ class AsyncClientContext(ContextModel):
        assert c1 is ctx.client
    """
 
-    __var__ = ContextVar("async-client-context")
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("async-client-context")
     client: PrefectClient
     _httpx_settings: Optional[dict[str, Any]] = PrivateAttr(None)
     _context_stack: int = PrivateAttr(0)
 
     def __init__(self, httpx_settings: Optional[dict[str, Any]] = None):
         super().__init__(
-            client=get_client(sync_client=False, httpx_settings=httpx_settings)
+            client=get_client(sync_client=False, httpx_settings=httpx_settings)
         )
         self._httpx_settings = httpx_settings
         self._context_stack = 0
@@ -284,8 +273,8 @@ class AsyncClientContext(ContextModel):
     async def __aexit__(self: Self, *exc_info: Any) -> None:
         self._context_stack -= 1
         if self._context_stack == 0:
-            await self.client.__aexit__(*exc_info)
-            return super().__exit__(*exc_info)
+            await self.client.__aexit__(*exc_info)
+            return super().__exit__(*exc_info)
 
     @classmethod
     @asynccontextmanager
@@ -308,16 +297,16 @@ class RunContext(ContextModel):
        client: The Prefect client instance being used for API communication
    """
 
-    def __init__(self, *args: Any, **kwargs: Any):
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
 
         start_client_metrics_server()
 
     start_time: DateTime = Field(default_factory=lambda: DateTime.now("UTC"))
-    input_keyset: Optional[Dict[str, Dict[str, str]]] = None
+    input_keyset: Optional[dict[str, dict[str, str]]] = None
     client: Union[PrefectClient, SyncPrefectClient]
 
-    def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
+    def serialize(self: Self, include_secrets: bool = True) -> dict[str, Any]:
         return self.model_dump(
             include={"start_time", "input_keyset"},
             exclude_unset=True,
@@ -344,7 +333,7 @@ class EngineContext(RunContext):
     flow_run: Optional[FlowRun] = None
     task_runner: TaskRunner[Any]
     log_prints: bool = False
-    parameters: Optional[Dict[str, Any]] = None
+    parameters: Optional[dict[str, Any]] = None
 
     # Flag signaling if the flow run context has been serialized and sent
     # to remote infrastructure.
@@ -355,10 +344,10 @@ class EngineContext(RunContext):
     persist_result: bool = Field(default_factory=get_default_persist_setting)
 
     # Counter for task calls allowing unique
-    task_run_dynamic_keys: Dict[str, Union[str, int]] = Field(default_factory=dict)
+    task_run_dynamic_keys: dict[str, Union[str, int]] = Field(default_factory=dict)
 
     # Counter for flow pauses
-    observed_flow_pauses: Dict[str, int] = Field(default_factory=dict)
+    observed_flow_pauses: dict[str, int] = Field(default_factory=dict)
 
     # Tracking for result from task runs in this flow run for dependency tracking
     # Holds the ID of the object returned by the task run and task run state
@@ -367,9 +356,9 @@ class EngineContext(RunContext):
     # Events worker to emit events
     events: Optional[EventsWorker] = None
 
-    __var__: ContextVar[Self] = ContextVar("flow_run")
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("flow_run")
 
-    def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
+    def serialize(self: Self, include_secrets: bool = True) -> dict[str, Any]:
         return self.model_dump(
             include={
                 "flow_run",
@@ -403,15 +392,15 @@ class TaskRunContext(RunContext):
     task: "Task[Any, Any]"
     task_run: TaskRun
     log_prints: bool = False
-    parameters: Dict[str, Any]
+    parameters: dict[str, Any]
 
     # Result handling
     result_store: ResultStore
     persist_result: bool = Field(default_factory=get_default_persist_setting_for_tasks)
 
-    __var__ = ContextVar("task_run")
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("task_run")
 
-    def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
+    def serialize(self: Self, include_secrets: bool = True) -> dict[str, Any]:
         return self.model_dump(
             include={
                 "task_run",
@@ -437,14 +426,14 @@ class TagsContext(ContextModel):
        current_tags: A set of current tags in the context
    """
 
-    current_tags: Set[str] = Field(default_factory=set)
+    current_tags: set[str] = Field(default_factory=set)
 
     @classmethod
-    def get(cls) -> "TagsContext":
+    def get(cls) -> Self:
         # Return an empty `TagsContext` instead of `None` if no context exists
-        return cls.__var__.get(TagsContext())
+        return cls.__var__.get(cls())
 
-    __var__: ContextVar[Self] = ContextVar("tags")
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("tags")
 
 
 class SettingsContext(ContextModel):
@@ -461,15 +450,21 @@ class SettingsContext(ContextModel):
     profile: Profile
     settings: Settings
 
-    __var__: ContextVar[Self] = ContextVar("settings")
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("settings")
 
     def __hash__(self: Self) -> int:
         return hash(self.settings)
 
     @classmethod
-    def get(cls) -> "SettingsContext":
+    def get(cls) -> Optional["SettingsContext"]:
         # Return the global context instead of `None` if no context exists
-        return super().get() or GLOBAL_SETTINGS_CONTEXT
+        try:
+            return super().get() or GLOBAL_SETTINGS_CONTEXT
+        except NameError:
+            # GLOBAL_SETTINGS_CONTEXT has not yet been set; in order to create
+            # it profiles need to be loaded, and that process calls
+            # SettingsContext.get().
+            return None
 
 
 def get_run_context() -> Union[FlowRunContext, TaskRunContext]:
@@ -512,7 +507,7 @@ def get_settings_context() -> SettingsContext:
 
 
 @contextmanager
-def tags(*new_tags: str) -> Generator[Set[str], None, None]:
+def tags(*new_tags: str) -> Generator[set[str], None, None]:
     """
     Context manager to add tags to flow and task run calls.
 
@@ -570,10 +565,10 @@ def tags(*new_tags: str) -> Generator[Set[str], None, None]:
 
 @contextmanager
 def use_profile(
-    profile: Union[Profile, str],
+    profile: Union[Profile, str],
     override_environment_variables: bool = False,
     include_current_context: bool = True,
-):
+) -> Generator[SettingsContext, Any, None]:
     """
     Switch to a profile for the duration of this context.
 
@@ -595,11 +590,12 @@ def use_profile(
     profiles = prefect.settings.load_profiles()
     profile = profiles[profile]
 
-    if not isinstance(profile, Profile):
-        raise TypeError(
-            f"Unexpected type {type(profile).__name__!r} for `profile`. "
-            "Expected 'str' or 'Profile'."
-        )
+    if not TYPE_CHECKING:
+        if not isinstance(profile, Profile):
+            raise TypeError(
+                f"Unexpected type {type(profile).__name__!r} for `profile`. "
+                "Expected 'str' or 'Profile'."
+            )
 
     # Create a copy of the profiles settings as we will mutate it
     profile_settings = profile.settings.copy()
@@ -620,7 +616,7 @@ def use_profile(
         yield ctx
 
 
-def root_settings_context():
+def root_settings_context() -> SettingsContext:
     """
     Return the settings context that will exist as the root context for the module.
 
@@ -670,9 +666,9 @@ def root_settings_context():
     # an override in the `SettingsContext.get` method.
 
 
-GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context()
+GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context()
 
 
 # 2024-07-02: This surfaces an actionable error message for removed objects
 # in Prefect 3.0 upgrade.
-__getattr__ = getattr_migration(__name__)
+__getattr__: Callable[[str], Any] = getattr_migration(__name__)
prefect/deployments/base.py
CHANGED
@@ -5,27 +5,19 @@ build system for managing flows and deployments.
 To get started, follow along with [the deloyments tutorial](/tutorials/deployments/).
 """
 
-import ast
-import asyncio
-import math
 import os
-import subprocess
-import sys
 from copy import deepcopy
 from pathlib import Path
 from typing import Any, Dict, List, Optional, cast
 
-import anyio
 import yaml
 from ruamel.yaml import YAML
 
 from prefect.client.schemas.actions import DeploymentScheduleCreate
 from prefect.client.schemas.objects import ConcurrencyLimitStrategy
 from prefect.client.schemas.schedules import IntervalSchedule
-from prefect.logging import get_logger
-from prefect.settings import PREFECT_DEBUG_MODE
-from prefect.utilities.asyncutils import LazySemaphore
-from prefect.utilities.filesystem import create_default_ignore_file, get_open_file_limit
+from prefect.utilities._git import get_git_branch, get_git_remote_origin_url
+from prefect.utilities.filesystem import create_default_ignore_file
 from prefect.utilities.templating import apply_values
 
 
@@ -146,36 +138,6 @@ def configure_project_by_recipe(recipe: str, **formatting_kwargs) -> dict:
     return config
 
 
-def _get_git_remote_origin_url() -> Optional[str]:
-    """
-    Returns the git remote origin URL for the current directory.
-    """
-    try:
-        origin_url = subprocess.check_output(
-            ["git", "config", "--get", "remote.origin.url"],
-            shell=sys.platform == "win32",
-            stderr=subprocess.DEVNULL,
-        )
-        origin_url = origin_url.decode().strip()
-    except subprocess.CalledProcessError:
-        return None
-
-    return origin_url
-
-
-def _get_git_branch() -> Optional[str]:
-    """
-    Returns the git branch for the current directory.
-    """
-    try:
-        branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
-        branch = branch.decode().strip()
-    except subprocess.CalledProcessError:
-        return None
-
-    return branch
-
-
 def initialize_project(
     name: Optional[str] = None,
     recipe: Optional[str] = None,
@@ -198,11 +160,11 @@ def initialize_project(
     formatting_kwargs = {"directory": str(Path(".").absolute().resolve())}
     dir_name = os.path.basename(os.getcwd())
 
-    remote_url = _get_git_remote_origin_url()
+    remote_url = get_git_remote_origin_url()
     if remote_url:
         formatting_kwargs["repository"] = remote_url
         is_git_based = True
-        branch = _get_git_branch()
+        branch = get_git_branch()
         formatting_kwargs["branch"] = branch or "main"
 
     formatting_kwargs["name"] = dir_name
@@ -373,105 +335,3 @@ def _save_deployment_to_prefect_file(
 
     with prefect_file.open(mode="w") as f:
         ryaml.dump(parsed_prefect_file_contents, f)
-
-
-# Only allow half of the open file limit to be open at once to allow for other
-# actors to open files.
-OPEN_FILE_SEMAPHORE = LazySemaphore(lambda: math.floor(get_open_file_limit() * 0.5))
-
-
-async def _find_flow_functions_in_file(filename: str) -> List[Dict]:
-    decorator_name = "flow"
-    decorator_module = "prefect"
-    decorated_functions = []
-    async with OPEN_FILE_SEMAPHORE:
-        try:
-            async with await anyio.open_file(filename) as f:
-                try:
-                    tree = ast.parse(await f.read())
-                except SyntaxError:
-                    if PREFECT_DEBUG_MODE:
-                        get_logger().debug(
-                            f"Could not parse {filename} as a Python file. Skipping."
-                        )
-                    return decorated_functions
-        except Exception as exc:
-            if PREFECT_DEBUG_MODE:
-                get_logger().debug(f"Could not open {filename}: {exc}. Skipping.")
-            return decorated_functions
-
-    for node in ast.walk(tree):
-        if isinstance(
-            node,
-            (
-                ast.FunctionDef,
-                ast.AsyncFunctionDef,
-            ),
-        ):
-            for decorator in node.decorator_list:
-                # handles @flow
-                is_name_match = (
-                    isinstance(decorator, ast.Name) and decorator.id == decorator_name
-                )
-                # handles @flow()
-                is_func_name_match = (
-                    isinstance(decorator, ast.Call)
-                    and isinstance(decorator.func, ast.Name)
-                    and decorator.func.id == decorator_name
-                )
-                # handles @prefect.flow
-                is_module_attribute_match = (
-                    isinstance(decorator, ast.Attribute)
-                    and isinstance(decorator.value, ast.Name)
-                    and decorator.value.id == decorator_module
-                    and decorator.attr == decorator_name
-                )
-                # handles @prefect.flow()
-                is_module_attribute_func_match = (
-                    isinstance(decorator, ast.Call)
-                    and isinstance(decorator.func, ast.Attribute)
-                    and decorator.func.attr == decorator_name
-                    and isinstance(decorator.func.value, ast.Name)
-                    and decorator.func.value.id == decorator_module
-                )
-                if is_name_match or is_module_attribute_match:
-                    decorated_functions.append(
-                        {
-                            "flow_name": node.name,
-                            "function_name": node.name,
-                            "filepath": str(filename),
-                        }
-                    )
-                if is_func_name_match or is_module_attribute_func_match:
-                    name_kwarg_node = next(
-                        (kw for kw in decorator.keywords if kw.arg == "name"), None
-                    )
-                    flow_name = (
-                        name_kwarg_node.value.value
-                        if isinstance(name_kwarg_node, ast.Constant)
-                        else node.name
-                    )
-                    decorated_functions.append(
-                        {
-                            "flow_name": flow_name,
-                            "function_name": node.name,
-                            "filepath": str(filename),
-                        }
-                    )
-    return decorated_functions
-
-
-async def _search_for_flow_functions(directory: str = ".") -> List[Dict]:
-    """
-    Search for flow functions in the provided directory. If no directory is provided,
-    the current working directory is used.
-
-    Returns:
-        List[Dict]: the flow name, function name, and filepath of all flow functions found
-    """
-    path = anyio.Path(directory)
-    coros = []
-    async for file in path.rglob("*.py"):
-        coros.append(_find_flow_functions_in_file(file))
-
-    return [fn for file_fns in await asyncio.gather(*coros) for fn in file_fns]
prefect/deployments/flow_runs.py
CHANGED
@@ -10,9 +10,12 @@ from prefect.client.schemas import FlowRun
 from prefect.client.utilities import inject_client
 from prefect.context import FlowRunContext, TaskRunContext
 from prefect.logging import get_logger
-from prefect.results import BaseResult, ResultRecordMetadata
+from prefect.results import ResultRecordMetadata
 from prefect.states import Pending, Scheduled
 from prefect.tasks import Task
+from prefect.telemetry.run_telemetry import (
+    LABELS_TRACEPARENT_KEY,
+)
 from prefect.utilities.asyncutils import sync_compatible
 from prefect.utilities.slugify import slugify
 
@@ -22,7 +25,6 @@ if TYPE_CHECKING:
 
 prefect.client.schemas.StateCreate.model_rebuild(
     _types_namespace={
-        "BaseResult": BaseResult,
         "ResultRecordMetadata": ResultRecordMetadata,
     }
 )
@@ -156,6 +158,13 @@ async def run_deployment(
     else:
         parent_task_run_id = None
 
+    if flow_run_ctx and flow_run_ctx.flow_run:
+        traceparent = flow_run_ctx.flow_run.labels.get(LABELS_TRACEPARENT_KEY)
+    else:
+        traceparent = None
+
+    trace_labels = {LABELS_TRACEPARENT_KEY: traceparent} if traceparent else {}
+
     flow_run = await client.create_flow_run_from_deployment(
         deployment.id,
         parameters=parameters,
@@ -166,6 +175,7 @@ async def run_deployment(
         parent_task_run_id=parent_task_run_id,
         work_queue_name=work_queue_name,
         job_variables=job_variables,
+        labels=trace_labels,
     )
 
     flow_run_id = flow_run.id
prefect/deployments/runner.py
CHANGED
@@ -33,7 +33,7 @@ import importlib
 import tempfile
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Union
+from typing import TYPE_CHECKING, Any, ClassVar, Iterable, List, Optional, Union
 from uuid import UUID
 
 from pydantic import (
@@ -41,6 +41,7 @@ from pydantic import (
     ConfigDict,
     Field,
     PrivateAttr,
+    field_validator,
     model_validator,
 )
 from rich.console import Console
@@ -129,7 +130,7 @@ class RunnerDeployment(BaseModel):
         available settings.
     """
 
-    model_config = ConfigDict(arbitrary_types_allowed=True)
+    model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True)
 
     name: str = Field(..., description="The name of the deployment.")
     flow_name: Optional[str] = Field(
@@ -220,6 +221,13 @@ class RunnerDeployment(BaseModel):
     def entrypoint_type(self) -> EntrypointType:
         return self._entrypoint_type
 
+    @field_validator("name", mode="before")
+    @classmethod
+    def validate_name(cls, value: str) -> str:
+        if value.endswith(".py"):
+            return Path(value).stem
+        return value
+
     @model_validator(mode="after")
     def validate_automation_names(self):
         """Ensure that each trigger has a name for its automation if none is provided."""
@@ -508,7 +516,7 @@ class RunnerDeployment(BaseModel):
             concurrency_options = None
 
         deployment = cls(
-            name=
+            name=name,
             flow_name=flow.name,
             schedules=constructed_schedules,
             concurrency_limit=concurrency_limit,
prefect/deployments/steps/pull.py
CHANGED
@@ -50,6 +50,7 @@ async def agit_clone(
     include_submodules: bool = False,
     access_token: Optional[str] = None,
     credentials: Optional["Block"] = None,
+    directories: Optional[list[str]] = None,
 ) -> dict[str, str]:
     """
     Asynchronously clones a git repository into the current working directory.
@@ -81,6 +82,7 @@ async def agit_clone(
         credentials=_credentials,
         branch=branch,
         include_submodules=include_submodules,
+        directories=directories,
     )
 
     await _pull_git_repository_with_retries(storage)
@@ -95,6 +97,7 @@ def git_clone(
     include_submodules: bool = False,
     access_token: Optional[str] = None,
     credentials: Optional["Block"] = None,
+    directories: Optional[list[str]] = None,
 ) -> dict[str, str]:
     """
     Clones a git repository into the current working directory.
@@ -107,6 +110,7 @@ def git_clone(
             the repository will be cloned using the default git credentials
         credentials: a GitHubCredentials, GitLabCredentials, or BitBucketCredentials block can be used to specify the
             credentials to use for cloning the repository.
+        directories: Specify directories you want to be included (uses git sparse-checkout)
 
     Returns:
         dict: a dictionary containing a `directory` key of the new directory that was created
@@ -164,6 +168,14 @@ def git_clone(
         - prefect.deployments.steps.git_clone:
             repository: git@github.com:org/repo.git
     ```
+
+    Clone a repository using sparse-checkout (allows specific folders of the repository to be checked out)
+    ```yaml
+    pull:
+        - prefect.deployments.steps.git_clone:
+            repository: https://github.com/org/repo.git
+            directories: ["dir_1", "dir_2", "prefect"]
+    ```
     """
     if access_token and credentials:
         raise ValueError(
@@ -177,6 +189,7 @@ def git_clone(
         credentials=_credentials,
         branch=branch,
         include_submodules=include_submodules,
+        directories=directories,
     )
 
     run_coro_as_sync(_pull_git_repository_with_retries(storage))