prefect-client 3.1.12__py3-none-any.whl → 3.1.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_experimental/lineage.py +63 -0
- prefect/_experimental/sla/client.py +53 -27
- prefect/_experimental/sla/objects.py +10 -2
- prefect/_internal/concurrency/services.py +2 -2
- prefect/_internal/concurrency/threads.py +6 -0
- prefect/_internal/retries.py +6 -3
- prefect/_internal/schemas/validators.py +6 -4
- prefect/_version.py +3 -3
- prefect/artifacts.py +4 -1
- prefect/automations.py +1 -1
- prefect/blocks/abstract.py +5 -2
- prefect/blocks/notifications.py +1 -0
- prefect/cache_policies.py +70 -22
- prefect/client/orchestration/_automations/client.py +4 -0
- prefect/client/orchestration/_deployments/client.py +3 -3
- prefect/client/utilities.py +3 -3
- prefect/context.py +16 -6
- prefect/deployments/base.py +7 -4
- prefect/deployments/flow_runs.py +5 -1
- prefect/deployments/runner.py +6 -11
- prefect/deployments/steps/core.py +1 -1
- prefect/deployments/steps/pull.py +8 -3
- prefect/deployments/steps/utility.py +2 -2
- prefect/docker/docker_image.py +13 -9
- prefect/engine.py +19 -10
- prefect/events/cli/automations.py +4 -4
- prefect/events/clients.py +17 -14
- prefect/events/filters.py +34 -34
- prefect/events/schemas/automations.py +12 -8
- prefect/events/schemas/events.py +5 -1
- prefect/events/worker.py +1 -1
- prefect/filesystems.py +1 -1
- prefect/flow_engine.py +172 -123
- prefect/flows.py +119 -74
- prefect/futures.py +14 -7
- prefect/infrastructure/provisioners/__init__.py +2 -0
- prefect/infrastructure/provisioners/cloud_run.py +4 -4
- prefect/infrastructure/provisioners/coiled.py +249 -0
- prefect/infrastructure/provisioners/container_instance.py +4 -3
- prefect/infrastructure/provisioners/ecs.py +55 -43
- prefect/infrastructure/provisioners/modal.py +5 -4
- prefect/input/actions.py +5 -1
- prefect/input/run_input.py +157 -43
- prefect/logging/configuration.py +5 -8
- prefect/logging/filters.py +2 -2
- prefect/logging/formatters.py +15 -11
- prefect/logging/handlers.py +24 -14
- prefect/logging/highlighters.py +5 -5
- prefect/logging/loggers.py +29 -20
- prefect/main.py +3 -1
- prefect/results.py +166 -86
- prefect/runner/runner.py +112 -84
- prefect/runner/server.py +3 -1
- prefect/runner/storage.py +18 -18
- prefect/runner/submit.py +19 -12
- prefect/runtime/deployment.py +15 -8
- prefect/runtime/flow_run.py +19 -6
- prefect/runtime/task_run.py +7 -3
- prefect/settings/base.py +17 -7
- prefect/settings/legacy.py +4 -4
- prefect/settings/models/api.py +4 -3
- prefect/settings/models/cli.py +4 -3
- prefect/settings/models/client.py +7 -4
- prefect/settings/models/cloud.py +4 -3
- prefect/settings/models/deployments.py +4 -3
- prefect/settings/models/experiments.py +4 -3
- prefect/settings/models/flows.py +4 -3
- prefect/settings/models/internal.py +4 -3
- prefect/settings/models/logging.py +8 -6
- prefect/settings/models/results.py +4 -3
- prefect/settings/models/root.py +11 -16
- prefect/settings/models/runner.py +8 -5
- prefect/settings/models/server/api.py +6 -3
- prefect/settings/models/server/database.py +120 -25
- prefect/settings/models/server/deployments.py +4 -3
- prefect/settings/models/server/ephemeral.py +7 -4
- prefect/settings/models/server/events.py +6 -3
- prefect/settings/models/server/flow_run_graph.py +4 -3
- prefect/settings/models/server/root.py +4 -3
- prefect/settings/models/server/services.py +15 -12
- prefect/settings/models/server/tasks.py +7 -4
- prefect/settings/models/server/ui.py +4 -3
- prefect/settings/models/tasks.py +10 -5
- prefect/settings/models/testing.py +4 -3
- prefect/settings/models/worker.py +7 -4
- prefect/settings/profiles.py +13 -12
- prefect/settings/sources.py +20 -19
- prefect/states.py +17 -13
- prefect/task_engine.py +43 -33
- prefect/task_runners.py +35 -23
- prefect/task_runs.py +20 -11
- prefect/task_worker.py +12 -7
- prefect/tasks.py +67 -25
- prefect/telemetry/bootstrap.py +4 -1
- prefect/telemetry/run_telemetry.py +15 -13
- prefect/transactions.py +3 -3
- prefect/types/__init__.py +9 -6
- prefect/types/_datetime.py +19 -0
- prefect/utilities/_deprecated.py +38 -0
- prefect/utilities/engine.py +11 -4
- prefect/utilities/filesystem.py +2 -2
- prefect/utilities/generics.py +1 -1
- prefect/utilities/pydantic.py +21 -36
- prefect/workers/base.py +52 -30
- prefect/workers/process.py +20 -15
- prefect/workers/server.py +4 -5
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/METADATA +2 -2
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/RECORD +111 -108
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/LICENSE +0 -0
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/WHEEL +0 -0
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/top_level.txt +0 -0
prefect/task_runners.py
CHANGED
@@ -40,6 +40,8 @@ from prefect.utilities.callables import (
 from prefect.utilities.collections import isiterable
 
 if TYPE_CHECKING:
+    import logging
+
     from prefect.tasks import Task
 
 P = ParamSpec("P")
@@ -61,11 +63,11 @@ class TaskRunner(abc.ABC, Generic[F]):
     """
 
     def __init__(self):
-        self.logger = get_logger(f"task_runner.{self.name}")
+        self.logger: "logging.Logger" = get_logger(f"task_runner.{self.name}")
         self._started = False
 
     @property
-    def name(self):
+    def name(self) -> str:
         """The name of this task runner"""
         return type(self).__name__.lower().replace("taskrunner", "")
 
@@ -74,32 +76,42 @@ class TaskRunner(abc.ABC, Generic[F]):
         """Return a new instance of this task runner with the same configuration."""
         ...
 
+    @overload
     @abc.abstractmethod
     def submit(
         self,
-        task: "Task[P, R]",
+        task: "Task[P, Coroutine[Any, Any, R]]",
         parameters: dict[str, Any],
         wait_for: Iterable[PrefectFuture[Any]] | None = None,
         dependencies: dict[str, set[TaskRunInput]] | None = None,
     ) -> F:
-        """
-        Submit a task to the task run engine.
+        ...
 
-
-
-
-
+    @overload
+    @abc.abstractmethod
+    def submit(
+        self,
+        task: "Task[Any, R]",
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
+    ) -> F:
+        ...
 
-
-
-
-        """
+    @abc.abstractmethod
+    def submit(
+        self,
+        task: "Task[P, R]",
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
+    ) -> F:
         ...
 
     def map(
         self,
         task: "Task[P, R]",
-        parameters: dict[str, Any],
+        parameters: dict[str, Any | unmapped[Any] | allow_failure[Any]],
         wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
     ) -> PrefectFutureList[F]:
         """
@@ -205,7 +217,7 @@ class TaskRunner(abc.ABC, Generic[F]):
 
         return PrefectFutureList(futures)
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         if self._started:
             raise RuntimeError("This task runner is already started")
 
@@ -213,12 +225,12 @@ class TaskRunner(abc.ABC, Generic[F]):
         self._started = True
         return self
 
-    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any):
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
         self.logger.debug("Stopping task runner")
         self._started = False
 
 
-class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[Any]]):
+class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[R]]):
     def __init__(self, max_workers: Optional[int] = None):
         super().__init__()
         self._executor: Optional[ThreadPoolExecutor] = None
@@ -229,7 +241,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[Any]]):
         )
         self._cancel_events: Dict[uuid.UUID, threading.Event] = {}
 
-    def duplicate(self) -> "ThreadPoolTaskRunner":
+    def duplicate(self) -> "ThreadPoolTaskRunner[R]":
         return type(self)(max_workers=self._max_workers)
 
     @overload
@@ -254,7 +266,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[Any]]):
 
     def submit(
         self,
-        task: "Task[P, R]",
+        task: "Task[P, R | Coroutine[Any, Any, R]]",
         parameters: dict[str, Any],
         wait_for: Iterable[PrefectFuture[Any]] | None = None,
         dependencies: dict[str, set[TaskRunInput]] | None = None,
@@ -345,7 +357,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[Any]]):
     ) -> PrefectFutureList[PrefectConcurrentFuture[R]]:
         return super().map(task, parameters, wait_for)
 
-    def cancel_all(self):
+    def cancel_all(self) -> None:
         for event in self._cancel_events.values():
             event.set()
             self.logger.debug("Set cancel event")
@@ -354,12 +366,12 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[Any]]):
             self._executor.shutdown(cancel_futures=True)
             self._executor = None
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         super().__enter__()
         self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
         return self
 
-    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any):
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
         self.cancel_all()
         if self._executor is not None:
             self._executor.shutdown(cancel_futures=True)
@@ -380,7 +392,7 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture[R]]):
     def __init__(self):
         super().__init__()
 
-    def duplicate(self) -> "PrefectTaskRunner":
+    def duplicate(self) -> "PrefectTaskRunner[R]":
        return type(self)()
 
    @overload
prefect/task_runs.py
CHANGED
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
 import asyncio
 import atexit
 import threading
 import uuid
-from typing import Callable, Dict, Optional
+from typing import TYPE_CHECKING, Callable, Dict, Optional
 
 import anyio
 from cachetools import TTLCache
@@ -15,6 +17,9 @@ from prefect.events.clients import get_events_subscriber
 from prefect.events.filters import EventFilter, EventNameFilter
 from prefect.logging.loggers import get_logger
 
+if TYPE_CHECKING:
+    import logging
+
 
 class TaskRunWaiter:
     """
@@ -68,19 +73,19 @@ class TaskRunWaiter:
     _instance_lock = threading.Lock()
 
     def __init__(self):
-        self.logger = get_logger("TaskRunWaiter")
-        self._consumer_task: Optional[asyncio.Task] = None
+        self.logger: "logging.Logger" = get_logger("TaskRunWaiter")
+        self._consumer_task: "asyncio.Task[None] | None" = None
         self._observed_completed_task_runs: TTLCache[uuid.UUID, bool] = TTLCache(
             maxsize=10000, ttl=600
         )
         self._completion_events: Dict[uuid.UUID, asyncio.Event] = {}
-        self._completion_callbacks: Dict[uuid.UUID, Callable] = {}
+        self._completion_callbacks: Dict[uuid.UUID, Callable[[], None]] = {}
         self._loop: Optional[asyncio.AbstractEventLoop] = None
         self._observed_completed_task_runs_lock = threading.Lock()
         self._completion_events_lock = threading.Lock()
         self._started = False
 
-    def start(self):
+    def start(self) -> None:
         """
         Start the TaskRunWaiter service.
         """
@@ -89,10 +94,12 @@ class TaskRunWaiter:
         self.logger.debug("Starting TaskRunWaiter")
         loop_thread = get_global_loop()
 
-        if not asyncio.get_running_loop() == loop_thread._loop:
+        if not asyncio.get_running_loop() == loop_thread.loop:
             raise RuntimeError("TaskRunWaiter must run on the global loop thread.")
 
-        self._loop = loop_thread._loop
+        self._loop = loop_thread.loop
+        if TYPE_CHECKING:
+            assert self._loop is not None
 
         consumer_started = asyncio.Event()
         self._consumer_task = self._loop.create_task(
@@ -141,7 +148,7 @@ class TaskRunWaiter:
             except Exception as exc:
                 self.logger.error(f"Error processing event: {exc}")
 
-    def stop(self):
+    def stop(self) -> None:
         """
         Stop the TaskRunWaiter service.
         """
@@ -155,7 +162,7 @@ class TaskRunWaiter:
     @classmethod
     async def wait_for_task_run(
         cls, task_run_id: uuid.UUID, timeout: Optional[float] = None
-    ):
+    ) -> None:
         """
         Wait for a task run to finish.
 
@@ -199,7 +206,9 @@ class TaskRunWaiter:
            instance._completion_events.pop(task_run_id, None)
 
     @classmethod
-    def add_done_callback(cls, task_run_id: uuid.UUID, callback: Callable):
+    def add_done_callback(
+        cls, task_run_id: uuid.UUID, callback: Callable[[], None]
+    ) -> None:
         """
         Add a callback to be called when a task run finishes.
 
@@ -219,7 +228,7 @@ class TaskRunWaiter:
         instance._completion_callbacks[task_run_id] = callback
 
     @classmethod
-    def instance(cls):
+    def instance(cls) -> Self:
         """
         Get the singleton instance of TaskRunWaiter.
         """
prefect/task_worker.py
CHANGED
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import asyncio
 import inspect
 import os
@@ -16,7 +18,7 @@ import pendulum
 import uvicorn
 from exceptiongroup import BaseExceptionGroup  # novermin
 from fastapi import FastAPI
-from typing_extensions import ParamSpec, TypeVar
+from typing_extensions import ParamSpec, Self, TypeVar
 from websockets.exceptions import InvalidStatusCode
 
 from prefect import Task
@@ -42,7 +44,10 @@ from prefect.utilities.processutils import (
 from prefect.utilities.services import start_client_metrics_server
 from prefect.utilities.urls import url_for
 
-logger = get_logger("task_worker")
+if TYPE_CHECKING:
+    import logging
+
+logger: "logging.Logger" = get_logger("task_worker")
 
 P = ParamSpec("P")
 R = TypeVar("R", infer_variance=True)
@@ -85,7 +90,7 @@ class TaskWorker:
     def __init__(
         self,
         *tasks: Task[P, R],
-        limit: Optional[int] = 10,
+        limit: int | None = 10,
     ):
         self.tasks: list["Task[..., Any]"] = []
         for t in tasks:
@@ -100,7 +105,7 @@ class TaskWorker:
            else:
                self.tasks.append(t.with_options(persist_result=True))
 
-        self.task_keys = set(t.task_key for t in tasks if isinstance(t, Task))  # pyright: ignore[reportUnnecessaryIsInstance]
+        self.task_keys: set[str] = set(t.task_key for t in tasks if isinstance(t, Task))  # pyright: ignore[reportUnnecessaryIsInstance]
 
         self._started_at: Optional[pendulum.DateTime] = None
         self.stopping: bool = False
@@ -154,7 +159,7 @@ class TaskWorker:
     def available_tasks(self) -> Optional[int]:
         return int(self._limiter.available_tokens) if self._limiter else None
 
-    def handle_sigterm(self, signum: int, frame: object):
+    def handle_sigterm(self, signum: int, frame: object) -> None:
         """
         Shuts down the task worker when a SIGTERM is received.
         """
@@ -355,14 +360,14 @@ class TaskWorker:
         )
         await asyncio.wrap_future(future)
 
-    async def execute_task_run(self, task_run: TaskRun):
+    async def execute_task_run(self, task_run: TaskRun) -> None:
         """Execute a task run in the task worker."""
         async with self if not self.started else asyncnullcontext():
             token_acquired = await self._acquire_token(task_run.id)
             if token_acquired:
                 await self._safe_submit_scheduled_task_run(task_run)
 
-    async def __aenter__(self):
+    async def __aenter__(self) -> Self:
         logger.debug("Starting task worker...")
 
         if self._client._closed:  # pyright: ignore[reportPrivateUsage]
prefect/tasks.py
CHANGED
@@ -53,10 +53,7 @@ from prefect.results import (
     ResultStore,
     get_or_create_default_task_scheduling_storage,
 )
-from prefect.settings import (
-    PREFECT_TASK_DEFAULT_RETRIES,
-    PREFECT_TASK_DEFAULT_RETRY_DELAY_SECONDS,
-)
+from prefect.settings.context import get_current_settings
 from prefect.states import Pending, Scheduled, State
 from prefect.utilities.annotations import NotSet
 from prefect.utilities.asyncutils import run_coro_as_sync, sync_compatible
@@ -70,6 +67,8 @@ from prefect.utilities.importtools import to_qualified_name
 from prefect.utilities.urls import url_for
 
 if TYPE_CHECKING:
+    import logging
+
     from prefect.client.orchestration import PrefectClient
     from prefect.context import TaskRunContext
     from prefect.transactions import Transaction
@@ -80,7 +79,7 @@ P = ParamSpec("P")  # The parameters of the task
 
 NUM_CHARS_DYNAMIC_KEY = 8
 
-logger = get_logger("tasks")
+logger: "logging.Logger" = get_logger("tasks")
 
 FutureOrResult: TypeAlias = Union[PrefectFuture[T], T]
 OneOrManyFutureOrResult: TypeAlias = Union[
@@ -383,19 +382,19 @@ class Task(Generic[P, R]):
         if not callable(fn):
             raise TypeError("'fn' must be callable")
 
-        self.description = description or inspect.getdoc(fn)
+        self.description: str | None = description or inspect.getdoc(fn)
         update_wrapper(self, fn)
         self.fn = fn
 
         # the task is considered async if its function is async or an async
         # generator
-        self.isasync = asyncio.iscoroutinefunction(
+        self.isasync: bool = asyncio.iscoroutinefunction(
             self.fn
         ) or inspect.isasyncgenfunction(self.fn)
 
         # the task is considered a generator if its function is a generator or
         # an async generator
-        self.isgenerator = inspect.isgeneratorfunction(
+        self.isgenerator: bool = inspect.isgeneratorfunction(
             self.fn
         ) or inspect.isasyncgenfunction(self.fn)
 
@@ -405,7 +404,7 @@ class Task(Generic[P, R]):
             else:
                 self.name = self.fn.__name__
         else:
-            self.name = name
+            self.name: str = name
 
         if task_run_name is not None:
             if not isinstance(task_run_name, str) and not callable(task_run_name):
@@ -420,9 +419,9 @@ class Task(Generic[P, R]):
 
         raise_for_reserved_arguments(self.fn, ["return_state", "wait_for"])
 
-        self.tags = set(tags if tags else [])
+        self.tags: set[str] = set(tags if tags else [])
 
-        self.task_key = _generate_task_key(self.fn)
+        self.task_key: str = _generate_task_key(self.fn)
 
         if cache_policy is not NotSet and cache_key_fn is not None:
             logger.warning(
@@ -464,26 +463,29 @@ class Task(Generic[P, R]):
             # TODO: handle this situation with double storage
             self.cache_policy = None
         else:
-            self.cache_policy = cache_policy
+            self.cache_policy: Union[CachePolicy, type[NotSet], None] = cache_policy
 
         # TaskRunPolicy settings
         # TODO: We can instantiate a `TaskRunPolicy` and add Pydantic bound checks to
         # validate that the user passes positive numbers here
 
-        self.retries = (
-            retries if retries is not None else PREFECT_TASK_DEFAULT_RETRIES.value()
+        settings = get_current_settings()
+        self.retries: int = (
+            retries if retries is not None else settings.tasks.default_retries
        )
        if retry_delay_seconds is None:
-            retry_delay_seconds = PREFECT_TASK_DEFAULT_RETRY_DELAY_SECONDS.value()
+            retry_delay_seconds = settings.tasks.default_retry_delay_seconds
 
        if callable(retry_delay_seconds):
-            self.retry_delay_seconds = retry_delay_seconds(retries)
+            self.retry_delay_seconds = retry_delay_seconds(self.retries)
        elif not isinstance(retry_delay_seconds, (list, int, float, type(None))):
            raise TypeError(
                f"Invalid `retry_delay_seconds` provided; must be an int, float, list or callable. Received type {type(retry_delay_seconds)}"
            )
        else:
-            self.retry_delay_seconds = retry_delay_seconds
+            self.retry_delay_seconds: Union[
+                float, int, list[float], None
+            ] = retry_delay_seconds
 
        if isinstance(self.retry_delay_seconds, list) and (
            len(self.retry_delay_seconds) > 50
@@ -507,11 +509,15 @@ class Task(Generic[P, R]):
         self.result_serializer = result_serializer
         self.result_storage_key = result_storage_key
         self.cache_result_in_memory = cache_result_in_memory
-        self.timeout_seconds = float(timeout_seconds) if timeout_seconds else None
-        self.on_rollback_hooks = on_rollback or []
-        self.on_commit_hooks = on_commit or []
-        self.on_completion_hooks = on_completion or []
-        self.on_failure_hooks = on_failure or []
+        self.timeout_seconds: Union[float, None] = (
+            float(timeout_seconds) if timeout_seconds else None
+        )
+        self.on_rollback_hooks: list[Callable[["Transaction"], None]] = (
+            on_rollback or []
+        )
+        self.on_commit_hooks: list[Callable[["Transaction"], None]] = on_commit or []
+        self.on_completion_hooks: list[StateHookCallable] = on_completion or []
+        self.on_failure_hooks: list[StateHookCallable] = on_failure or []
 
         # retry_condition_fn must be a callable or None. If it is neither, raise a TypeError
         if retry_condition_fn is not None and not (callable(retry_condition_fn)):
@@ -527,7 +533,7 @@ class Task(Generic[P, R]):
     def ismethod(self) -> bool:
         return hasattr(self.fn, "__prefect_self__")
 
-    def __get__(self, instance: Any, owner: Any):
+    def __get__(self, instance: Any, owner: Any) -> "Task[P, R]":
         """
         Implement the descriptor protocol so that the task can be used as an instance method.
         When an instance method is loaded, this method is called with the "self" instance as
@@ -582,7 +588,7 @@ class Task(Generic[P, R]):
             Callable[["Task[..., Any]", TaskRun, State], bool]
         ] = None,
         viz_return_value: Optional[Any] = None,
-    ):
+    ) -> "Task[P, R]":
         """
         Create a new task from the current object, updating provided options.
 
@@ -1636,7 +1642,43 @@ def task(
     refresh_cache: Optional[bool] = None,
     on_completion: Optional[list[StateHookCallable]] = None,
     on_failure: Optional[list[StateHookCallable]] = None,
-    retry_condition_fn: Optional[Callable[[Task[P, R], TaskRun, State], bool]] = None,
+    retry_condition_fn: Literal[None] = None,
+    viz_return_value: Any = None,
+) -> Callable[[Callable[P, R]], Task[P, R]]:
+    ...
+
+
+# see https://github.com/PrefectHQ/prefect/issues/16380
+@overload
+def task(
+    __fn: Literal[None] = None,
+    *,
+    name: Optional[str] = None,
+    description: Optional[str] = None,
+    tags: Optional[Iterable[str]] = None,
+    version: Optional[str] = None,
+    cache_policy: Union[CachePolicy, type[NotSet]] = NotSet,
+    cache_key_fn: Optional[
+        Callable[["TaskRunContext", dict[str, Any]], Optional[str]]
+    ] = None,
+    cache_expiration: Optional[datetime.timedelta] = None,
+    task_run_name: Optional[TaskRunNameValueOrCallable] = None,
+    retries: int = 0,
+    retry_delay_seconds: Union[
+        float, int, list[float], Callable[[int], list[float]], None
+    ] = None,
+    retry_jitter_factor: Optional[float] = None,
+    persist_result: Optional[bool] = None,
+    result_storage: Optional[ResultStorage] = None,
+    result_storage_key: Optional[str] = None,
+    result_serializer: Optional[ResultSerializer] = None,
+    cache_result_in_memory: bool = True,
+    timeout_seconds: Union[int, float, None] = None,
+    log_prints: Optional[bool] = None,
+    refresh_cache: Optional[bool] = None,
+    on_completion: Optional[list[StateHookCallable]] = None,
+    on_failure: Optional[list[StateHookCallable]] = None,
+    retry_condition_fn: Optional[Callable[[Task[P, R], TaskRun, State], bool]] = None,
     viz_return_value: Any = None,
 ) -> Callable[[Callable[P, R]], Task[P, R]]:
     ...
prefect/telemetry/bootstrap.py
CHANGED
@@ -4,7 +4,10 @@ import prefect.settings
 from prefect.client.base import ServerType, determine_server_type
 from prefect.logging.loggers import get_logger
 
-logger = get_logger(__name__)
+if TYPE_CHECKING:
+    import logging
+
+logger: "logging.Logger" = get_logger(__name__)
 
 if TYPE_CHECKING:
     from opentelemetry.sdk._logs import LoggerProvider
prefect/telemetry/run_telemetry.py
CHANGED
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
 import time
 from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Union
 
 from opentelemetry import propagate, trace
 from opentelemetry.context import Context
@@ -47,14 +49,14 @@ class RunTelemetry:
     _tracer: "Tracer" = field(
         default_factory=lambda: get_tracer("prefect", prefect.__version__)
     )
-    span: Optional[Span] = None
+    span: Span | None = None
 
     async def async_start_span(
         self,
         run: FlowOrTaskRun,
         client: PrefectClient,
-        parameters: Optional[dict[str, Any]] = None,
-    ):
+        parameters: dict[str, Any] | None = None,
+    ) -> Span:
         traceparent, span = self._start_span(run, parameters)
 
         if self._run_type(run) == "flow" and traceparent:
@@ -70,8 +72,8 @@ class RunTelemetry:
         self,
         run: FlowOrTaskRun,
         client: SyncPrefectClient,
-        parameters: Optional[dict[str, Any]] = None,
-    ):
+        parameters: dict[str, Any] | None = None,
+    ) -> Span:
         traceparent, span = self._start_span(run, parameters)
 
         if self._run_type(run) == "flow" and traceparent:
@@ -84,8 +86,8 @@ class RunTelemetry:
     def _start_span(
         self,
         run: FlowOrTaskRun,
-        parameters: Optional[dict[str, Any]] = None,
-    ) -> tuple[Optional[str], Span]:
+        parameters: dict[str, Any] | None = None,
+    ) -> tuple[str | None, Span]:
         """
         Start a span for a run.
         """
@@ -139,8 +141,8 @@ class RunTelemetry:
         return "task" if isinstance(run, TaskRun) else "flow"
 
     def _trace_context_from_labels(
-        self, labels: Optional[KeyValueLabels]
-    ) -> Optional[Context]:
+        self, labels: KeyValueLabels | None
+    ) -> Context | None:
         """Get trace context from run labels if it exists."""
         if not labels or LABELS_TRACEPARENT_KEY not in labels:
             return None
@@ -148,7 +150,7 @@ class RunTelemetry:
         carrier = {TRACEPARENT_KEY: traceparent}
         return propagate.extract(carrier)
 
-    def _traceparent_from_span(self, span: Span) -> Optional[str]:
+    def _traceparent_from_span(self, span: Span) -> str | None:
         carrier: dict[str, Any] = {}
         propagate.inject(carrier, context=trace.set_span_in_context(span))
         return carrier.get(TRACEPARENT_KEY)
@@ -162,7 +164,7 @@ class RunTelemetry:
            self.span.end(time.time_ns())
            self.span = None
 
-    def end_span_on_failure(self, terminal_message: Optional[str] = None) -> None:
+    def end_span_on_failure(self, terminal_message: str | None = None) -> None:
         """
         End a span for a run on failure.
         """
@@ -203,7 +205,7 @@ class RunTelemetry:
            self.span.update_name(name=name)
            self.span.set_attribute("prefect.run.name", name)
 
-    def _parent_run(self) -> Optional[FlowOrTaskRun]:
+    def _parent_run(self) -> FlowOrTaskRun | None:
         """
         Identify the "parent run" for the current execution context.
 
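
Note: a sketch of the traceparent round trip that _traceparent_from_span and _trace_context_from_labels rely on, using plain OpenTelemetry APIs rather than Prefect internals; the tracer name "example" is illustrative.

from opentelemetry import propagate, trace

tracer = trace.get_tracer("example")

with tracer.start_as_current_span("parent") as span:
    carrier: dict[str, str] = {}
    # serialize the active span into a W3C traceparent header
    propagate.inject(carrier, context=trace.set_span_in_context(span))
    # ...and rebuild a Context from the carrier on the consuming side
    restored_context = propagate.extract(carrier)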
prefect/transactions.py
CHANGED
@@ -173,7 +173,7 @@ class Transaction(ContextModel):
     def is_active(self) -> bool:
         return self.state == TransactionState.ACTIVE
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         if self._token is not None:
             raise RuntimeError(
                 "Context already entered. Context enter calls cannot be nested."
@@ -206,7 +206,7 @@ class Transaction(ContextModel):
         self._token = self.__var__.set(self)
         return self
 
-    def __exit__(self, *exc_info: Any):
+    def __exit__(self, *exc_info: Any) -> None:
         exc_type, exc_val, _ = exc_info
         if not self._token:
             raise RuntimeError(
@@ -235,7 +235,7 @@ class Transaction(ContextModel):
 
         self.reset()
 
-    def begin(self):
+    def begin(self) -> None:
         if (
             self.store
             and self.key
prefect/types/__init__.py
CHANGED
@@ -1,10 +1,13 @@
+from __future__ import annotations
+
 from functools import partial
 from typing import Annotated, Any, Dict, List, Optional, Set, TypeVar, Union
-from typing_extensions import Literal, TypeAlias
+from typing_extensions import Literal
 import orjson
 import pydantic
-from pydantic_extra_types.pendulum_dt import Date as PydanticDate
-from pydantic_extra_types.pendulum_dt import DateTime as PydanticDateTime
+
+
+from ._datetime import DateTime, Date
 from pydantic import (
     BeforeValidator,
     Field,
@@ -35,8 +38,6 @@ TimeZone = Annotated[
     ),
 ]
 
-DateTime: TypeAlias = PydanticDateTime
-Date: TypeAlias = PydanticDate
 
 BANNED_CHARACTERS = ["/", "%", "&", ">", "<"]
 
@@ -114,7 +115,7 @@ class SecretDict(pydantic.Secret[Dict[str, Any]]):
 
 
 def validate_set_T_from_delim_string(
-    value: Union[str, T, Set[T], None], type_, delim=None
+    value: Union[str, T, Set[T], None], type_: type[T], delim: str | None = None
 ) -> Set[T]:
     """
     "no-info" before validator useful in scooping env vars
@@ -169,6 +170,8 @@ KeyValueLabelsField = Annotated[
 
 __all__ = [
     "ClientRetryExtraCodes",
+    "Date",
+    "DateTime",
     "LogLevel",
     "KeyValueLabelsField",
    "NonNegativeInteger",