prefect-client 3.1.10__py3-none-any.whl → 3.1.12__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.
- prefect/_experimental/lineage.py +7 -8
- prefect/_experimental/sla/__init__.py +0 -0
- prefect/_experimental/sla/client.py +66 -0
- prefect/_experimental/sla/objects.py +53 -0
- prefect/_internal/_logging.py +15 -3
- prefect/_internal/compatibility/async_dispatch.py +22 -16
- prefect/_internal/compatibility/deprecated.py +42 -18
- prefect/_internal/compatibility/migration.py +2 -2
- prefect/_internal/concurrency/inspection.py +12 -14
- prefect/_internal/concurrency/primitives.py +2 -2
- prefect/_internal/concurrency/services.py +154 -80
- prefect/_internal/concurrency/waiters.py +13 -9
- prefect/_internal/pydantic/annotations/pendulum.py +7 -7
- prefect/_internal/pytz.py +4 -3
- prefect/_internal/retries.py +10 -5
- prefect/_internal/schemas/bases.py +19 -10
- prefect/_internal/schemas/validators.py +227 -388
- prefect/_version.py +3 -3
- prefect/automations.py +236 -30
- prefect/blocks/__init__.py +3 -3
- prefect/blocks/abstract.py +53 -30
- prefect/blocks/core.py +183 -84
- prefect/blocks/notifications.py +133 -73
- prefect/blocks/redis.py +13 -9
- prefect/blocks/system.py +24 -11
- prefect/blocks/webhook.py +7 -5
- prefect/cache_policies.py +3 -2
- prefect/client/orchestration/__init__.py +1957 -0
- prefect/client/orchestration/_artifacts/__init__.py +0 -0
- prefect/client/orchestration/_artifacts/client.py +239 -0
- prefect/client/orchestration/_automations/__init__.py +0 -0
- prefect/client/orchestration/_automations/client.py +329 -0
- prefect/client/orchestration/_blocks_documents/__init__.py +0 -0
- prefect/client/orchestration/_blocks_documents/client.py +334 -0
- prefect/client/orchestration/_blocks_schemas/__init__.py +0 -0
- prefect/client/orchestration/_blocks_schemas/client.py +200 -0
- prefect/client/orchestration/_blocks_types/__init__.py +0 -0
- prefect/client/orchestration/_blocks_types/client.py +380 -0
- prefect/client/orchestration/_concurrency_limits/__init__.py +0 -0
- prefect/client/orchestration/_concurrency_limits/client.py +762 -0
- prefect/client/orchestration/_deployments/__init__.py +0 -0
- prefect/client/orchestration/_deployments/client.py +1128 -0
- prefect/client/orchestration/_flow_runs/__init__.py +0 -0
- prefect/client/orchestration/_flow_runs/client.py +903 -0
- prefect/client/orchestration/_flows/__init__.py +0 -0
- prefect/client/orchestration/_flows/client.py +343 -0
- prefect/client/orchestration/_logs/__init__.py +0 -0
- prefect/client/orchestration/_logs/client.py +97 -0
- prefect/client/orchestration/_variables/__init__.py +0 -0
- prefect/client/orchestration/_variables/client.py +157 -0
- prefect/client/orchestration/base.py +46 -0
- prefect/client/orchestration/routes.py +145 -0
- prefect/client/schemas/__init__.py +68 -28
- prefect/client/schemas/actions.py +2 -2
- prefect/client/schemas/filters.py +5 -0
- prefect/client/schemas/objects.py +8 -15
- prefect/client/schemas/schedules.py +22 -10
- prefect/concurrency/_asyncio.py +87 -0
- prefect/concurrency/{events.py → _events.py} +10 -10
- prefect/concurrency/asyncio.py +20 -104
- prefect/concurrency/context.py +6 -4
- prefect/concurrency/services.py +26 -74
- prefect/concurrency/sync.py +23 -44
- prefect/concurrency/v1/_asyncio.py +63 -0
- prefect/concurrency/v1/{events.py → _events.py} +13 -15
- prefect/concurrency/v1/asyncio.py +27 -80
- prefect/concurrency/v1/context.py +6 -4
- prefect/concurrency/v1/services.py +33 -79
- prefect/concurrency/v1/sync.py +18 -37
- prefect/context.py +66 -45
- prefect/deployments/base.py +10 -144
- prefect/deployments/flow_runs.py +12 -2
- prefect/deployments/runner.py +53 -4
- prefect/deployments/steps/pull.py +13 -0
- prefect/engine.py +17 -4
- prefect/events/clients.py +7 -1
- prefect/events/schemas/events.py +3 -2
- prefect/filesystems.py +6 -2
- prefect/flow_engine.py +101 -85
- prefect/flows.py +10 -1
- prefect/input/run_input.py +2 -1
- prefect/logging/logging.yml +1 -1
- prefect/main.py +1 -3
- prefect/results.py +2 -307
- prefect/runner/runner.py +4 -2
- prefect/runner/storage.py +87 -21
- prefect/serializers.py +32 -25
- prefect/settings/legacy.py +4 -4
- prefect/settings/models/api.py +3 -3
- prefect/settings/models/cli.py +3 -3
- prefect/settings/models/client.py +5 -3
- prefect/settings/models/cloud.py +8 -3
- prefect/settings/models/deployments.py +3 -3
- prefect/settings/models/experiments.py +4 -7
- prefect/settings/models/flows.py +3 -3
- prefect/settings/models/internal.py +4 -2
- prefect/settings/models/logging.py +4 -3
- prefect/settings/models/results.py +3 -3
- prefect/settings/models/root.py +3 -2
- prefect/settings/models/runner.py +4 -4
- prefect/settings/models/server/api.py +3 -3
- prefect/settings/models/server/database.py +11 -4
- prefect/settings/models/server/deployments.py +6 -2
- prefect/settings/models/server/ephemeral.py +4 -2
- prefect/settings/models/server/events.py +3 -2
- prefect/settings/models/server/flow_run_graph.py +6 -2
- prefect/settings/models/server/root.py +3 -3
- prefect/settings/models/server/services.py +26 -11
- prefect/settings/models/server/tasks.py +6 -3
- prefect/settings/models/server/ui.py +3 -3
- prefect/settings/models/tasks.py +5 -5
- prefect/settings/models/testing.py +3 -3
- prefect/settings/models/worker.py +5 -3
- prefect/settings/profiles.py +15 -2
- prefect/states.py +61 -45
- prefect/task_engine.py +54 -75
- prefect/task_runners.py +56 -55
- prefect/task_worker.py +2 -2
- prefect/tasks.py +90 -36
- prefect/telemetry/bootstrap.py +10 -9
- prefect/telemetry/run_telemetry.py +13 -8
- prefect/telemetry/services.py +4 -0
- prefect/transactions.py +4 -15
- prefect/utilities/_git.py +34 -0
- prefect/utilities/asyncutils.py +1 -1
- prefect/utilities/engine.py +3 -19
- prefect/utilities/generics.py +18 -0
- prefect/utilities/templating.py +25 -1
- prefect/workers/base.py +6 -3
- prefect/workers/process.py +1 -1
- {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/METADATA +2 -2
- {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/RECORD +135 -109
- prefect/client/orchestration.py +0 -4523
- prefect/records/__init__.py +0 -1
- prefect/records/base.py +0 -235
- prefect/records/filesystem.py +0 -213
- prefect/records/memory.py +0 -184
- prefect/records/result_store.py +0 -70
- {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/LICENSE +0 -0
- {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/WHEEL +0 -0
- {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/top_level.txt +0 -0
prefect/_experimental/lineage.py
CHANGED
@@ -86,18 +86,18 @@ async def emit_lineage_event(
     else:
         upstream_resources.extend(related_resources)
 
+    # We want to consider all resources upstream and downstream of the event as
+    # lineage-related, including flows, flow runs, etc., so we add the label to
+    # all resources involved in the event.
+    for res in upstream_resources + downstream_resources:
+        if "prefect.resource.lineage-group" not in res:
+            res["prefect.resource.lineage-group"] = "global"
+
     # Emit an event for each downstream resource. This is necessary because
     # our event schema allows one primary resource and many related resources,
     # and for the purposes of lineage, related resources can only represent
     # upstream resources.
     for resource in downstream_resources:
-        # Downstream lineage resources need to have the
-        # prefect.resource.lineage-group label. All upstram resources from a
-        # downstream resource with this label will be considered lineage-related
-        # resources.
-        if "prefect.resource.lineage-group" not in resource:
-            resource["prefect.resource.lineage-group"] = "global"
-
         emit_kwargs: Dict[str, Any] = {
             "event": event_name,
             "resource": resource,

@@ -170,7 +170,6 @@ async def emit_result_write_event(
         {
             "prefect.resource.id": result_resource_uri,
             "prefect.resource.role": "result",
-            "prefect.resource.lineage-group": "global",
         }
     ]
     await emit_lineage_event(
prefect/_experimental/sla/__init__.py
ADDED
File without changes
prefect/_experimental/sla/client.py
ADDED
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from prefect.client.orchestration.base import BaseAsyncClient, BaseClient
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from prefect._experimental.sla.objects import SlaTypes
+
+
+class SlaClient(BaseClient):
+    def create_sla(self, sla: "SlaTypes") -> "UUID":
+        """
+        Creates a service level agreement.
+        Args:
+            sla: The SLA to create. Must have a deployment ID set.
+        Raises:
+            httpx.RequestError: if the SLA was not created for any reason
+        Returns:
+            the ID of the SLA in the backend
+        """
+        if not sla.owner_resource:
+            raise ValueError(
+                "Deployment ID is not set. Please set using `set_deployment_id`."
+            )
+
+        response = self.request(
+            "POST",
+            "/slas/",
+            json=sla.model_dump(mode="json", exclude_unset=True),
+        )
+        response.raise_for_status()
+
+        from uuid import UUID
+
+        return UUID(response.json().get("id"))
+
+
+class SlaAsyncClient(BaseAsyncClient):
+    async def create_sla(self, sla: "SlaTypes") -> "UUID":
+        """
+        Creates a service level agreement.
+        Args:
+            sla: The SLA to create. Must have a deployment ID set.
+        Raises:
+            httpx.RequestError: if the SLA was not created for any reason
+        Returns:
+            the ID of the SLA in the backend
+        """
+        if not sla.owner_resource:
+            raise ValueError(
+                "Deployment ID is not set. Please set using `set_deployment_id`."
+            )
+
+        response = await self.request(
+            "POST",
+            "/slas/",
+            json=sla.model_dump(mode="json", exclude_unset=True),
+        )
+        response.raise_for_status()
+
+        from uuid import UUID
+
+        return UUID(response.json().get("id"))
prefect/_experimental/sla/objects.py
ADDED
@@ -0,0 +1,53 @@
+from __future__ import annotations
+
+import abc
+from typing import Literal, Optional, Union
+from uuid import UUID
+
+from pydantic import Field, PrivateAttr, computed_field
+from typing_extensions import TypeAlias
+
+from prefect._internal.schemas.bases import PrefectBaseModel
+
+
+class ServiceLevelAgreement(PrefectBaseModel, abc.ABC):
+    """An ORM representation of a Service Level Agreement."""
+
+    _deployment_id: Optional[UUID] = PrivateAttr(default=None)
+
+    name: str = Field(
+        default=...,
+        description="The name of the SLA. Names must be unique on a per-deployment basis.",
+    )
+    severity: Literal["minor", "low", "moderate", "high", "critical"] = Field(
+        default="moderate",
+        description="The severity of the SLA.",
+    )
+    enabled: Optional[bool] = Field(
+        default=True,
+        description="Whether the SLA is enabled.",
+    )
+
+    def set_deployment_id(self, deployment_id: UUID):
+        self._deployment_id = deployment_id
+        return self
+
+    @computed_field
+    @property
+    def owner_resource(self) -> Union[str, None]:
+        if self._deployment_id:
+            return f"prefect.deployment.{self._deployment_id}"
+        return None
+
+
+class TimeToCompletionSla(ServiceLevelAgreement):
+    """An SLA that triggers when a flow run takes longer than the specified duration."""
+
+    duration: int = Field(
+        default=...,
+        description="The maximum flow run duration allowed before the SLA is violated, expressed in seconds.",
+    )
+
+
+# Concrete SLA types
+SlaTypes: TypeAlias = Union[TimeToCompletionSla]
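The two new files above define the experimental SLA model and the client methods that create SLAs. A minimal usage sketch based only on what the diff shows; how a configured `SlaClient` instance is obtained from the orchestration client is not part of this diff, so that step is left as a comment with a hypothetical variable name:

    from uuid import UUID

    from prefect._experimental.sla.objects import TimeToCompletionSla

    # Build an SLA and bind it to a deployment; set_deployment_id returns self
    # and populates the computed owner_resource ("prefect.deployment.<id>"),
    # which create_sla requires before it POSTs to /slas/.
    sla = TimeToCompletionSla(
        name="finish-within-an-hour",
        severity="high",
        duration=3600,  # maximum flow run duration, in seconds
    ).set_deployment_id(UUID("00000000-0000-0000-0000-000000000000"))

    assert sla.owner_resource == "prefect.deployment.00000000-0000-0000-0000-000000000000"

    # With an already-configured client (hypothetical `sla_client`):
    # sla_id = sla_client.create_sla(sla)  # returns the new SLA's UUID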
prefect/_internal/_logging.py
CHANGED
@@ -1,4 +1,14 @@
 import logging
+import sys
+
+from typing_extensions import Self
+
+if sys.version_info < (3, 11):
+
+    def getLevelNamesMapping() -> dict[str, int]:
+        return getattr(logging, "_nameToLevel").copy()
+else:
+    getLevelNamesMapping = logging.getLevelNamesMapping  # novermin
 
 
 class SafeLogger(logging.Logger):

@@ -11,11 +21,13 @@ class SafeLogger(logging.Logger):
         # deadlocks during complex concurrency handling
         from prefect.settings import PREFECT_LOGGING_INTERNAL_LEVEL
 
-
+        internal_level = getLevelNamesMapping()[PREFECT_LOGGING_INTERNAL_LEVEL.value()]
+
+        return level >= internal_level
 
-    def getChild(self, suffix: str):
+    def getChild(self, suffix: str) -> Self:
         logger = super().getChild(suffix)
-        logger.__class__ =
+        logger.__class__ = self.__class__
         return logger
 
 
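For reference, the backported helper resolves a level name to its numeric value the same way on every supported Python version, which is how `isEnabledFor` now compares against `PREFECT_LOGGING_INTERNAL_LEVEL`. A minimal sketch (it imports from a private module, so illustration only):

    import logging

    from prefect._internal._logging import getLevelNamesMapping

    internal_level = getLevelNamesMapping()["WARNING"]  # 30
    assert logging.ERROR >= internal_level  # an ERROR record would be emitted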
prefect/_internal/compatibility/async_dispatch.py
CHANGED
@@ -1,11 +1,13 @@
 import asyncio
 import inspect
+from collections.abc import Coroutine
 from functools import wraps
-from typing import TYPE_CHECKING, Any, Callable,
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, cast
 
 from typing_extensions import ParamSpec
 
 if TYPE_CHECKING:
+    from prefect.flows import Flow
     from prefect.tasks import Task
 
 R = TypeVar("R")

@@ -41,7 +43,9 @@ def is_in_async_context() -> bool:
 
 
 def _is_acceptable_callable(
-    obj: Union[
+    obj: Union[
+        Callable[P, R], "Flow[P, R]", "Task[P, R]", "classmethod[type[Any], P, R]"
+    ],
 ) -> bool:
     if inspect.iscoroutinefunction(obj):
         return True

@@ -58,7 +62,10 @@ def _is_acceptable_callable(
 
 
 def async_dispatch(
-    async_impl:
+    async_impl: Union[
+        Callable[P, Coroutine[Any, Any, R]],
+        "classmethod[type[Any], P, Coroutine[Any, Any, R]]",
+    ],
 ) -> Callable[[Callable[P, R]], Callable[P, Union[R, Coroutine[Any, Any, R]]]]:
     """
     Decorator that dispatches to either sync or async implementation based on context.

@@ -66,27 +73,26 @@ def async_dispatch(
     Args:
         async_impl: The async implementation to dispatch to when in async context
     """
+    if not _is_acceptable_callable(async_impl):
+        raise TypeError("async_impl must be an async function")
+    if isinstance(async_impl, classmethod):
+        async_impl = cast(Callable[P, Coroutine[Any, Any, R]], async_impl.__func__)
 
     def decorator(
         sync_fn: Callable[P, R],
     ) -> Callable[P, Union[R, Coroutine[Any, Any, R]]]:
-        if not _is_acceptable_callable(async_impl):
-            raise TypeError("async_impl must be an async function")
-
         @wraps(sync_fn)
         def wrapper(
             *args: P.args,
-            _sync: Optional[bool] = None,  # type: ignore
             **kwargs: P.kwargs,
         ) -> Union[R, Coroutine[Any, Any, R]]:
-
-
-
-
-            if
-
-
-
-        return wrapper  # type: ignore
+            _sync = kwargs.pop("_sync", None)
+            should_run_sync = (
+                bool(_sync) if _sync is not None else not is_in_async_context()
+            )
+            fn = sync_fn if should_run_sync else async_impl
+            return fn(*args, **kwargs)
+
+        return wrapper
 
     return decorator
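The reworked decorator now validates `async_impl` once, unwraps classmethods, and reads the `_sync` override out of `**kwargs` instead of declaring it as a keyword parameter. A usage sketch based on the wrapper shown above; the function names are illustrative, not part of Prefect:

    import asyncio

    from prefect._internal.compatibility.async_dispatch import async_dispatch

    async def load_config_async(path: str) -> str:
        return f"async:{path}"

    @async_dispatch(load_config_async)
    def load_config(path: str) -> str:
        return f"sync:{path}"

    # With no running event loop, the sync implementation runs.
    assert load_config("prefect.toml") == "sync:prefect.toml"

    # _sync=False forces the async implementation; the call returns a coroutine.
    assert asyncio.run(load_config("prefect.toml", _sync=False)) == "async:prefect.toml"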
prefect/_internal/compatibility/deprecated.py
CHANGED
@@ -13,10 +13,11 @@ e.g. Jan 2023.
 import functools
 import sys
 import warnings
-from typing import Any, Callable,
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union
 
 import pendulum
 from pydantic import BaseModel
+from typing_extensions import ParamSpec, TypeAlias, TypeVar
 
 from prefect.utilities.callables import get_call_parameters
 from prefect.utilities.importtools import (

@@ -25,8 +26,10 @@ from prefect.utilities.importtools import (
     to_qualified_name,
 )
 
-
+P = ParamSpec("P")
+R = TypeVar("R", infer_variance=True)
 M = TypeVar("M", bound=BaseModel)
+T = TypeVar("T")
 
 
 DEPRECATED_WARNING = (

@@ -38,7 +41,7 @@ DEPRECATED_MOVED_WARNING = (
     "path after {end_date}. {help}"
 )
 DEPRECATED_DATEFMT = "MMM YYYY"  # e.g. Feb 2023
-DEPRECATED_MODULE_ALIASES:
+DEPRECATED_MODULE_ALIASES: list[AliasedModuleDefinition] = []
 
 
 class PrefectDeprecationWarning(DeprecationWarning):

@@ -53,7 +56,7 @@ def generate_deprecation_message(
     end_date: Optional[str] = None,
     help: str = "",
     when: str = "",
-):
+) -> str:
     if not start_date and not end_date:
         raise ValueError(
             "A start date is required if an end date is not provided. Suggested start"

@@ -61,6 +64,8 @@ def generate_deprecation_message(
         )
 
     if not end_date:
+        if TYPE_CHECKING:
+            assert start_date is not None
         parsed_start_date = pendulum.from_format(start_date, DEPRECATED_DATEFMT)
         parsed_end_date = parsed_start_date.add(months=6)
         end_date = parsed_end_date.format(DEPRECATED_DATEFMT)

@@ -83,8 +88,8 @@ def deprecated_callable(
     end_date: Optional[str] = None,
     stacklevel: int = 2,
     help: str = "",
-) -> Callable[[
-    def decorator(fn:
+) -> Callable[[Callable[P, R]], Callable[P, R]]:
+    def decorator(fn: Callable[P, R]) -> Callable[P, R]:
         message = generate_deprecation_message(
             name=to_qualified_name(fn),
             start_date=start_date,

@@ -93,7 +98,7 @@ def deprecated_callable(
         )
 
         @functools.wraps(fn)
-        def wrapper(*args, **kwargs):
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
             warnings.warn(message, PrefectDeprecationWarning, stacklevel=stacklevel)
             return fn(*args, **kwargs)
 

@@ -108,8 +113,8 @@ def deprecated_class(
     end_date: Optional[str] = None,
     stacklevel: int = 2,
     help: str = "",
-) -> Callable[[T], T]:
-    def decorator(cls: T):
+) -> Callable[[type[T]], type[T]]:
+    def decorator(cls: type[T]) -> type[T]:
         message = generate_deprecation_message(
             name=to_qualified_name(cls),
             start_date=start_date,

@@ -120,7 +125,7 @@ def deprecated_class(
         original_init = cls.__init__
 
         @functools.wraps(original_init)
-        def new_init(self, *args, **kwargs):
+        def new_init(self: T, *args: Any, **kwargs: Any) -> None:
             warnings.warn(message, PrefectDeprecationWarning, stacklevel=stacklevel)
             original_init(self, *args, **kwargs)
 

@@ -139,7 +144,7 @@ def deprecated_parameter(
     help: str = "",
     when: Optional[Callable[[Any], bool]] = None,
     when_message: str = "",
-) -> Callable[[
+) -> Callable[[Callable[P, R]], Callable[P, R]]:
     """
     Mark a parameter in a callable as deprecated.
 

@@ -155,7 +160,7 @@ def deprecated_parameter(
 
     when = when or (lambda _: True)
 
-    def decorator(fn:
+    def decorator(fn: Callable[P, R]) -> Callable[P, R]:
         message = generate_deprecation_message(
             name=f"The parameter {name!r} for {fn.__name__!r}",
             start_date=start_date,

@@ -165,7 +170,7 @@ def deprecated_parameter(
         )
 
         @functools.wraps(fn)
-        def wrapper(*args, **kwargs):
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
             try:
                 parameters = get_call_parameters(fn, args, kwargs, apply_defaults=False)
             except Exception:

@@ -182,6 +187,10 @@ def deprecated_parameter(
     return decorator
 
 
+JsonValue: TypeAlias = Union[int, float, str, bool, None, list["JsonValue"], "JsonDict"]
+JsonDict: TypeAlias = dict[str, JsonValue]
+
+
 def deprecated_field(
     name: str,
     *,

@@ -191,7 +200,7 @@ def deprecated_field(
     help: str = "",
     when: Optional[Callable[[Any], bool]] = None,
     stacklevel: int = 2,
-):
+) -> Callable[[type[M]], type[M]]:
     """
     Mark a field in a Pydantic model as deprecated.
 

@@ -212,7 +221,7 @@ def deprecated_field(
 
     # Replaces the model's __init__ method with one that performs an additional warning
     # check
-    def decorator(model_cls:
+    def decorator(model_cls: type[M]) -> type[M]:
         message = generate_deprecation_message(
             name=f"The field {name!r} in {model_cls.__name__!r}",
             start_date=start_date,

@@ -224,7 +233,7 @@ def deprecated_field(
         cls_init = model_cls.__init__
 
         @functools.wraps(model_cls.__init__)
-        def __init__(__pydantic_self__, **data: Any) -> None:
+        def __init__(__pydantic_self__: M, **data: Any) -> None:
             if name in data.keys() and when(data[name]):
                 warnings.warn(message, PrefectDeprecationWarning, stacklevel=stacklevel)
 

@@ -232,8 +241,23 @@ def deprecated_field(
 
             field = __pydantic_self__.model_fields.get(name)
             if field is not None:
-
-
+                json_schema_extra = field.json_schema_extra or {}
+
+                if not isinstance(json_schema_extra, dict):
+                    # json_schema_extra is a hook function; wrap it to add the deprecated flag.
+                    extra_func = json_schema_extra
+
+                    @functools.wraps(extra_func)
+                    def wrapped(__json_schema: JsonDict) -> None:
+                        extra_func(__json_schema)
+                        __json_schema["deprecated"] = True
+
+                    json_schema_extra = wrapped
+
+                else:
+                    json_schema_extra["deprecated"] = True
+
+                field.json_schema_extra = json_schema_extra
 
         # Patch the model's init method
         model_cls.__init__ = __init__
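The deprecation decorators are now fully typed (`ParamSpec`/`TypeVar` instead of untyped wrappers); their runtime behavior is unchanged. A small sketch of the callable decorator, assuming the keyword parameters visible in the hunks above and dates in the `MMM YYYY` format the module uses:

    import warnings

    from prefect._internal.compatibility.deprecated import (
        PrefectDeprecationWarning,
        deprecated_callable,
    )

    @deprecated_callable(start_date="Jan 2025", help="Use new_helper() instead.")
    def old_helper() -> int:
        return 1

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        old_helper()

    # The warning message includes an end date six months after start_date.
    assert issubclass(caught[0].category, PrefectDeprecationWarning)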
prefect/_internal/compatibility/migration.py
CHANGED
@@ -43,7 +43,7 @@ Removed objects:
 """
 
 import sys
-from typing import Any, Callable
+from typing import Any, Callable
 
 from pydantic_core import PydanticCustomError
 

@@ -157,7 +157,7 @@ def getattr_migration(module_name: str) -> Callable[[str], Any]:
                 f"`{import_path}` has been removed. {error_message}"
             )
 
-        globals:
+        globals: dict[str, Any] = sys.modules[module_name].__dict__
         if name in globals:
             return globals[name]
 
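`getattr_migration` builds a module-level `__getattr__` hook; the changed line only adds a type annotation to the module globals it consults. The typical wiring, inferred from the function's signature rather than shown in this diff, looks like:

    # at the bottom of a module whose attributes have moved or been removed
    from prefect._internal.compatibility.migration import getattr_migration

    __getattr__ = getattr_migration(__name__)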
prefect/_internal/concurrency/inspection.py
CHANGED
@@ -7,7 +7,6 @@ import linecache
 import sys
 import threading
 from types import FrameType
-from typing import List, Optional
 
 """
 The following functions are derived from dask/distributed which is licensed under the

@@ -72,26 +71,25 @@ def repr_frame(frame: FrameType) -> str:
     return text + "\n\t" + line
 
 
-def call_stack(frame: FrameType) ->
+def call_stack(frame: FrameType) -> list[str]:
     """Create a call text stack from a frame"""
-
-    cur_frame
+    frames: list[str] = []
+    cur_frame = frame
     while cur_frame:
-
+        frames.append(repr_frame(cur_frame))
         cur_frame = cur_frame.f_back
-    return
+    return frames[::-1]
 
 
-def stack_for_threads(*threads: threading.Thread) ->
-    frames = sys._current_frames()
+def stack_for_threads(*threads: threading.Thread) -> list[str]:
+    frames = sys._current_frames()  # pyright: ignore[reportPrivateUsage]
     try:
-        lines = []
+        lines: list[str] = []
         for thread in threads:
-
-
-            )
-            thread_frames
-            if thread_frames:
+            ident = thread.ident
+            hex_ident = hex(ident) if ident is not None else "<unknown>"
+            lines.append(f"------ Call stack of {thread.name} ({hex_ident}) -----")
+            if ident is not None and (thread_frames := frames.get(ident)):
                 lines.append("".join(call_stack(thread_frames)))
             else:
                 lines.append("No stack frames found")
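The rewritten helpers keep their behavior (walk a frame's `f_back` chain, then render one section per thread) while dropping the `typing.List`/`Optional` imports and guarding against threads without an `ident`. A usage sketch, assuming the function returns the `lines` list its annotation describes:

    import threading

    from prefect._internal.concurrency.inspection import stack_for_threads

    # One "------ Call stack of <thread name> (<hex ident>) -----" section per live thread.
    print("\n".join(stack_for_threads(*threading.enumerate())))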
prefect/_internal/concurrency/primitives.py
CHANGED
@@ -27,7 +27,7 @@ class Event:
     """
 
     def __init__(self) -> None:
-        self._waiters = collections.deque()
+        self._waiters: collections.deque[asyncio.Future[bool]] = collections.deque()
         self._value = False
         self._lock = threading.Lock()
 

@@ -69,7 +69,7 @@ class Event:
         if self._value:
             return True
 
-        fut = asyncio.get_running_loop().create_future()
+        fut: asyncio.Future[bool] = asyncio.get_running_loop().create_future()
         self._waiters.append(fut)
 
         try: