prefect-client 3.0.0rc10__py3-none-any.whl → 3.0.0rc12__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, exactly as they were published to a supported registry. It is provided for informational purposes only.
- prefect/_internal/concurrency/api.py +1 -1
- prefect/_internal/concurrency/services.py +9 -0
- prefect/_internal/retries.py +61 -0
- prefect/artifacts.py +12 -0
- prefect/client/cloud.py +1 -1
- prefect/client/schemas/actions.py +4 -0
- prefect/client/schemas/objects.py +1 -1
- prefect/concurrency/asyncio.py +3 -3
- prefect/concurrency/events.py +1 -1
- prefect/concurrency/services.py +3 -2
- prefect/concurrency/sync.py +19 -5
- prefect/context.py +8 -2
- prefect/deployments/__init__.py +28 -15
- prefect/deployments/steps/pull.py +7 -0
- prefect/events/schemas/events.py +10 -0
- prefect/flow_engine.py +10 -9
- prefect/flows.py +194 -68
- prefect/futures.py +53 -7
- prefect/logging/loggers.py +1 -1
- prefect/results.py +1 -46
- prefect/runner/runner.py +96 -23
- prefect/runner/server.py +20 -22
- prefect/runner/submit.py +0 -8
- prefect/runtime/flow_run.py +38 -3
- prefect/settings.py +9 -30
- prefect/task_engine.py +158 -48
- prefect/task_worker.py +1 -1
- prefect/tasks.py +164 -17
- prefect/transactions.py +2 -15
- prefect/utilities/asyncutils.py +13 -9
- prefect/utilities/engine.py +34 -1
- prefect/workers/base.py +98 -208
- prefect/workers/process.py +262 -4
- prefect/workers/server.py +27 -9
- {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/METADATA +4 -4
- {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/RECORD +39 -38
- {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/LICENSE +0 -0
- {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/WHEEL +0 -0
- {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/top_level.txt +0 -0
prefect/_internal/concurrency/api.py
CHANGED
@@ -151,7 +151,7 @@ class from_async(_base):
         __call: Union[Callable[[], T], Call[T]],
         timeout: Optional[float] = None,
         done_callbacks: Optional[Iterable[Call]] = None,
-    ) ->
+    ) -> T:
         call = _cast_to_call(__call)
         waiter = AsyncWaiter(call=call)
         for callback in done_callbacks or []:
prefect/_internal/concurrency/services.py
CHANGED
@@ -151,6 +151,7 @@ class QueueService(abc.ABC, Generic[T]):

             if item is None:
                 logger.debug("Exiting service %r", self)
+                self._queue.task_done()
                 break

             try:
@@ -164,6 +165,8 @@ class QueueService(abc.ABC, Generic[T]):
                     item,
                     exc_info=log_traceback,
                 )
+            finally:
+                self._queue.task_done()

     @abc.abstractmethod
     async def _handle(self, item: T):
@@ -235,6 +238,12 @@ class QueueService(abc.ABC, Generic[T]):
         else:
             return concurrent.futures.wait(futures, timeout=timeout)

+    def wait_until_empty(self):
+        """
+        Wait until the queue is empty and all items have been processed.
+        """
+        self._queue.join()
+
     @classmethod
     def instance(cls: Type[Self], *args) -> Self:
         """
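
Note: the two new `task_done()` calls are what make `wait_until_empty` usable. Every item pulled off the queue is now marked done on both the shutdown path and the error path, so `Queue.join()` can return. A minimal stdlib sketch of the same pattern (the worker and items below are illustrative, not Prefect code):

import queue
import threading

q: "queue.Queue" = queue.Queue()

def worker() -> None:
    while True:
        item = q.get()
        if item is None:
            q.task_done()  # mark the sentinel done before exiting (mirrors the first fix)
            break
        try:
            print(f"handling {item}")
        finally:
            q.task_done()  # always mark the item done, even if handling raised (mirrors the second fix)

threading.Thread(target=worker, daemon=True).start()
for i in range(3):
    q.put(i)
q.join()  # behaves like wait_until_empty(): returns once every queued item is processed
q.put(None)  # shut the worker down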
prefect/_internal/retries.py
ADDED
@@ -0,0 +1,61 @@
+import asyncio
+from functools import wraps
+from typing import Any, Callable, Tuple, Type
+
+from prefect.logging.loggers import get_logger
+from prefect.utilities.math import clamped_poisson_interval
+
+logger = get_logger("retries")
+
+
+def exponential_backoff_with_jitter(
+    attempt: int, base_delay: float, max_delay: float
+) -> float:
+    average_interval = min(base_delay * (2**attempt), max_delay)
+    return clamped_poisson_interval(average_interval, clamping_factor=0.3)
+
+
+def retry_async_fn(
+    max_attempts: int = 3,
+    backoff_strategy: Callable[
+        [int, float, float], float
+    ] = exponential_backoff_with_jitter,
+    base_delay: float = 1,
+    max_delay: float = 10,
+    retry_on_exceptions: Tuple[Type[Exception], ...] = (Exception,),
+):
+    """A decorator for retrying an async function.
+
+    Args:
+        max_attempts: The maximum number of times to retry the function.
+        backoff_strategy: A function that takes in the number of attempts, the base
+            delay, and the maximum delay, and returns the delay to use for the next
+            attempt. Defaults to an exponential backoff with jitter.
+        base_delay: The base delay to use for the first attempt.
+        max_delay: The maximum delay to use for the last attempt.
+        retry_on_exceptions: A tuple of exception types to retry on. Defaults to
+            retrying on all exceptions.
+    """
+
+    def decorator(func):
+        @wraps(func)
+        async def wrapper(*args: Any, **kwargs: Any) -> Any:
+            for attempt in range(max_attempts):
+                try:
+                    return await func(*args, **kwargs)
+                except retry_on_exceptions as e:
+                    if attempt == max_attempts - 1:
+                        logger.exception(
+                            f"Function {func.__name__!r} failed after {max_attempts} attempts"
+                        )
+                        raise
+                    delay = backoff_strategy(attempt, base_delay, max_delay)
+                    logger.warning(
+                        f"Attempt {attempt + 1} of function {func.__name__!r} failed with {type(e).__name__}. "
+                        f"Retrying in {delay:.2f} seconds..."
+                    )
+                    await asyncio.sleep(delay)
+
+        return wrapper
+
+    return decorator
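
Note: `retry_async_fn` lives under `prefect._internal`, so it is not a public API, but a short usage sketch clarifies the knobs documented above (the `fetch_manifest` coroutine and its URL are hypothetical):

import httpx

from prefect._internal.retries import retry_async_fn

@retry_async_fn(
    max_attempts=5,
    base_delay=0.5,
    max_delay=8,
    retry_on_exceptions=(httpx.HTTPError,),
)
async def fetch_manifest(url: str) -> dict:
    # Hypothetical helper: retried up to 5 times with jittered exponential backoff.
    async with httpx.AsyncClient() as client:
        response = await client.get(url)
        response.raise_for_status()
        return response.json()

# asyncio.run(fetch_manifest("https://example.com/manifest.json"))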
prefect/artifacts.py
CHANGED
@@ -6,6 +6,7 @@ from __future__ import annotations

 import json  # noqa: I001
 import math
+import warnings
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
 from uuid import UUID

@@ -54,8 +55,19 @@ class Artifact(ArtifactRequest):
         Returns:
             - The created artifact.
         """
+        from prefect.context import MissingContextError, get_run_context
+
         client, _ = get_or_create_client(client)
         task_run_id, flow_run_id = get_task_and_flow_run_ids()
+
+        try:
+            get_run_context()
+        except MissingContextError:
+            warnings.warn(
+                "Artifact creation outside of a flow or task run is deprecated and will be removed in a later version.",
+                FutureWarning,
+            )
+
         return await client.create_artifact(
             artifact=ArtifactRequest(
                 type=self.type,
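
Note: the new check only warns; creating an artifact outside a run context still succeeds. A rough sketch of what callers will observe (this assumes the `Artifact` fields from earlier releases, e.g. `key`, `type`, and `data`, and a reachable Prefect API):

import warnings

from prefect.artifacts import Artifact

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # No flow or task run is active here, so a FutureWarning is emitted.
    Artifact(key="ad-hoc-report", type="markdown", data="# hello").create()

assert any(issubclass(w.category, FutureWarning) for w in caught)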
prefect/client/cloud.py
CHANGED
@@ -9,7 +9,7 @@ from starlette import status
 import prefect.context
 import prefect.settings
 from prefect.client.base import PrefectHttpxAsyncClient
-from prefect.client.schemas import Workspace
+from prefect.client.schemas.objects import Workspace
 from prefect.exceptions import ObjectNotFound, PrefectException
 from prefect.settings import (
     PREFECT_API_KEY,
prefect/client/schemas/actions.py
CHANGED
@@ -377,6 +377,10 @@ class DeploymentFlowRunCreate(ActionBaseModel):
     parameters: Dict[str, Any] = Field(
         default_factory=dict, description="The parameters for the flow run."
     )
+    enforce_parameter_schema: Optional[bool] = Field(
+        default=None,
+        description="Whether or not to enforce the parameter schema on this run.",
+    )
     context: Dict[str, Any] = Field(
         default_factory=dict, description="The context for the flow run."
     )
prefect/concurrency/asyncio.py
CHANGED
@@ -1,6 +1,6 @@
 import asyncio
 from contextlib import asynccontextmanager
-from typing import List, Literal, Optional, Union, cast
+from typing import AsyncGenerator, List, Literal, Optional, Union, cast

 import httpx
 import pendulum
@@ -34,7 +34,7 @@ async def concurrency(
     names: Union[str, List[str]],
     occupy: int = 1,
     timeout_seconds: Optional[float] = None,
-):
+) -> AsyncGenerator[None, None]:
     """A context manager that acquires and releases concurrency slots from the
     given concurrency limits.

@@ -77,7 +77,7 @@ async def concurrency(
         _emit_concurrency_release_events(limits, occupy, emitted_events)


-async def rate_limit(names: Union[str, List[str]], occupy: int = 1):
+async def rate_limit(names: Union[str, List[str]], occupy: int = 1) -> None:
     """Block execution until an `occupy` number of slots of the concurrency
     limits given in `names` are acquired. Requires that all given concurrency
     limits have a slot decay.
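
Note: the added return-type annotations do not change behavior; the async API is used as before. A brief sketch (the limit names are placeholders and must exist on the server, and a reachable Prefect API is assumed):

import asyncio

from prefect.concurrency.asyncio import concurrency, rate_limit

async def main() -> None:
    # Hold one slot on the "database" limit for the duration of the block.
    async with concurrency("database", occupy=1):
        ...

    # Block until a slot on a decaying limit is available, then continue.
    await rate_limit("outbound-api-calls")

asyncio.run(main())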
prefect/concurrency/events.py
CHANGED
@@ -54,6 +54,6 @@ def _emit_concurrency_release_events(
     limits: List[MinimalConcurrencyLimitResponse],
     occupy: int,
     events: Dict[UUID, Optional[Event]],
-):
+) -> None:
     for limit in limits:
         _emit_concurrency_event("released", limit, limits, occupy, events[limit.id])
prefect/concurrency/services.py
CHANGED
@@ -3,6 +3,7 @@ import concurrent.futures
 from contextlib import asynccontextmanager
 from typing import (
     TYPE_CHECKING,
+    AsyncGenerator,
     FrozenSet,
     Optional,
     Tuple,
@@ -27,14 +28,14 @@ class ConcurrencySlotAcquisitionService(QueueService):
         self.concurrency_limit_names = sorted(list(concurrency_limit_names))

     @asynccontextmanager
-    async def _lifespan(self):
+    async def _lifespan(self) -> AsyncGenerator[None, None]:
         async with get_client() as client:
             self._client = client
             yield

     async def _handle(
         self, item: Tuple[int, str, Optional[float], concurrent.futures.Future]
-    ):
+    ) -> None:
         occupy, mode, timeout_seconds, future = item
         try:
             response = await self.acquire_slots(occupy, mode, timeout_seconds)
prefect/concurrency/sync.py
CHANGED
@@ -1,5 +1,15 @@
 from contextlib import contextmanager
-from typing import
+from typing import (
+    Any,
+    Awaitable,
+    Callable,
+    Generator,
+    List,
+    Optional,
+    TypeVar,
+    Union,
+    cast,
+)

 import pendulum

@@ -22,13 +32,15 @@ from .events import (
     _emit_concurrency_release_events,
 )

+T = TypeVar("T")
+

 @contextmanager
 def concurrency(
     names: Union[str, List[str]],
     occupy: int = 1,
     timeout_seconds: Optional[float] = None,
-):
+) -> Generator[None, None, None]:
     """A context manager that acquires and releases concurrency slots from the
     given concurrency limits.

@@ -75,7 +87,7 @@ def concurrency(
         _emit_concurrency_release_events(limits, occupy, emitted_events)


-def rate_limit(names: Union[str, List[str]], occupy: int = 1):
+def rate_limit(names: Union[str, List[str]], occupy: int = 1) -> None:
     """Block execution until an `occupy` number of slots of the concurrency
     limits given in `names` are acquired. Requires that all given concurrency
     limits have a slot decay.
@@ -91,11 +103,13 @@ def rate_limit(names: Union[str, List[str]], occupy: int = 1):
     _emit_concurrency_acquisition_events(limits, occupy)


-def _call_async_function_from_sync(
+def _call_async_function_from_sync(
+    fn: Callable[..., Awaitable[T]], *args: Any, **kwargs: Any
+) -> T:
     loop = get_running_loop()
     call = create_call(fn, *args, **kwargs)

     if loop is not None:
         return from_sync.call_soon_in_loop_thread(call).result()
     else:
-        return call()
+        return call()  # type: ignore [return-value]
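
Note: the sync module gets the same typing treatment and remains the entry point for plain, non-async code. A short sketch (the limit name is a placeholder and a reachable Prefect API is assumed):

from prefect.concurrency.sync import concurrency, rate_limit

def write_batch(rows: list) -> None:
    # Hold one slot on the "database" limit while writing.
    with concurrency("database", occupy=1):
        ...

rate_limit("outbound-api-calls")  # blocks until a slot on a decaying limit is granted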
prefect/context.py
CHANGED
@@ -9,6 +9,7 @@ For more user-accessible information about the current run, see [`prefect.runtim
 import os
 import sys
 import warnings
+import weakref
 from contextlib import ExitStack, contextmanager
 from contextvars import ContextVar, Token
 from pathlib import Path
@@ -17,6 +18,7 @@ from typing import (
     Any,
     Dict,
     Generator,
+    Mapping,
     Optional,
     Set,
     Type,
@@ -291,8 +293,12 @@ class EngineContext(RunContext):
     # Counter for flow pauses
     observed_flow_pauses: Dict[str, int] = Field(default_factory=dict)

-    # Tracking for result from task runs in this flow run
-
+    # Tracking for result from task runs in this flow run for dependency tracking
+    # Holds the ID of the object returned by the task run and task run state
+    # This is a weakref dictionary to avoid undermining garbage collection
+    task_run_results: Mapping[int, State] = Field(
+        default_factory=weakref.WeakValueDictionary
+    )

     # Events worker to emit events to Prefect Cloud
     events: Optional[EventsWorker] = None
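
Note: storing `task_run_results` in a `weakref.WeakValueDictionary` means the engine context no longer keeps task run states alive by itself; an entry disappears once the last strong reference to its value goes away. A small stdlib illustration of that behavior (`DummyState` stands in for Prefect's `State`):

import gc
import weakref

class DummyState:
    """Stand-in for a task run State; values must support weak references."""

results: "weakref.WeakValueDictionary[int, DummyState]" = weakref.WeakValueDictionary()

state = DummyState()
results[42] = state
assert 42 in results

del state      # drop the only strong reference...
gc.collect()
assert 42 not in results  # ...and the mapping entry is gone, so the context cannot leak states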
prefect/deployments/__init__.py
CHANGED
@@ -1,20 +1,33 @@
+from typing import TYPE_CHECKING
 from prefect._internal.compatibility.migration import getattr_migration
-import prefect.deployments.base
-import prefect.deployments.steps
-from prefect.deployments.base import (
-    initialize_project,
-)

-from prefect.deployments.runner import (
-    RunnerDeployment,
-    deploy,
-    DockerImage,
-    EntrypointType,
-)

+if TYPE_CHECKING:
+    from .flow_runs import run_deployment
+    from .base import initialize_project
+    from .runner import deploy

-
-
-)
+_public_api: dict[str, tuple[str, str]] = {
+    "initialize_project": (__spec__.parent, ".base"),
+    "run_deployment": (__spec__.parent, ".flow_runs"),
+    "deploy": (__spec__.parent, ".runner"),
+}

-
+# Declare API for type-checkers
+__all__ = ["initialize_project", "deploy", "run_deployment"]
+
+
+def __getattr__(attr_name: str) -> object:
+    dynamic_attr = _public_api.get(attr_name)
+    if dynamic_attr is None:
+        return getattr_migration(__name__)(attr_name)
+
+    package, module_name = dynamic_attr
+
+    from importlib import import_module
+
+    if module_name == "__module__":
+        return import_module(f".{attr_name}", package=package)
+    else:
+        module = import_module(module_name, package=package)
+        return getattr(module, attr_name)
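
Note: the rewritten `__init__.py` relies on the module-level `__getattr__` hook (PEP 562), so `run_deployment`, `initialize_project`, and `deploy` stay importable from `prefect.deployments` but are only loaded on first access, and unknown names fall through to `getattr_migration`. A standalone sketch of the same pattern, with illustrative package and module names:

# mypackage/__init__.py -- illustrative lazy re-export, not Prefect's actual file
from importlib import import_module
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Type checkers see the real symbol without paying the import cost at runtime.
    from .heavy import expensive_function

_public_api = {"expensive_function": (__spec__.parent, ".heavy")}
__all__ = ["expensive_function"]

def __getattr__(attr_name: str) -> object:
    try:
        package, module_name = _public_api[attr_name]
    except KeyError:
        raise AttributeError(f"module {__name__!r} has no attribute {attr_name!r}") from None
    module = import_module(module_name, package=package)
    return getattr(module, attr_name)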
prefect/deployments/steps/pull.py
CHANGED
@@ -6,6 +6,7 @@ import os
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional

+from prefect._internal.retries import retry_async_fn
 from prefect.logging.loggers import get_logger
 from prefect.runner.storage import BlockStorageAdapter, GitRepository, RemoteStorage
 from prefect.utilities.asyncutils import sync_compatible
@@ -31,6 +32,12 @@ def set_working_directory(directory: str) -> dict:
     return dict(directory=directory)


+@retry_async_fn(
+    max_attempts=3,
+    base_delay=1,
+    max_delay=10,
+    retry_on_exceptions=(RuntimeError,),
+)
 @sync_compatible
 async def git_clone(
     repository: str,
prefect/events/schemas/events.py
CHANGED
@@ -60,6 +60,16 @@ class Resource(Labelled):
     def name(self) -> Optional[str]:
         return self.get("prefect.resource.name")

+    def prefect_object_id(self, kind: str) -> UUID:
+        """Extracts the UUID from an event's resource ID if it's the expected kind
+        of prefect resource"""
+        prefix = f"{kind}." if not kind.endswith(".") else kind
+
+        if not self.id.startswith(prefix):
+            raise ValueError(f"Resource ID {self.id} does not start with {prefix}")
+
+        return UUID(self.id[len(prefix) :])
+

 class RelatedResource(Resource):
     """A Resource with a specific role in an Event"""
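
Note: a quick sketch of the new helper; constructing a `Resource` directly from its label dict follows prior releases and is an assumption here:

from uuid import uuid4

from prefect.events.schemas.events import Resource

flow_run_id = uuid4()
resource = Resource({"prefect.resource.id": f"prefect.flow-run.{flow_run_id}"})

assert resource.prefect_object_id("prefect.flow-run") == flow_run_id
# A resource of a different kind raises ValueError:
# resource.prefect_object_id("prefect.deployment")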
prefect/flow_engine.py
CHANGED
@@ -7,7 +7,6 @@ from dataclasses import dataclass, field
 from typing import (
     Any,
     AsyncGenerator,
-    Callable,
     Coroutine,
     Dict,
     Generator,
@@ -92,9 +91,12 @@ def load_flow_and_flow_run(flow_run_id: UUID) -> Tuple[FlowRun, Flow]:

     flow_run = client.read_flow_run(flow_run_id)
     if entrypoint:
-        flow
+        # we should not accept a placeholder flow at runtime
+        flow = load_flow_from_entrypoint(entrypoint, use_placeholder_flow=False)
     else:
-        flow = run_coro_as_sync(
+        flow = run_coro_as_sync(
+            load_flow_from_flow_run(flow_run, use_placeholder_flow=False)
+        )

     return flow_run, flow

@@ -415,7 +417,7 @@ class FlowRunEngine(Generic[P, R]):

         return flow_run

-    def call_hooks(self, state: Optional[State] = None)
+    def call_hooks(self, state: Optional[State] = None):
         if state is None:
             state = self.state
         flow = self.flow
@@ -613,11 +615,7 @@ class FlowRunEngine(Generic[P, R]):

             if self.state.is_running():
                 self.call_hooks()
-
-            yield
-        finally:
-            if self.state.is_final() or self.state.is_cancelling():
-                self.call_hooks()
+            yield

     @contextmanager
     def run_context(self):
@@ -638,6 +636,9 @@ class FlowRunEngine(Generic[P, R]):
         except Exception as exc:
             self.logger.exception("Encountered exception during execution: %r", exc)
             self.handle_exception(exc)
+        finally:
+            if self.state.is_final() or self.state.is_cancelling():
+                self.call_hooks()

     def call_flow_fn(self) -> Union[R, Coroutine[Any, Any, R]]:
         """