prefect-client 2.19.3__py3-none-any.whl → 3.0.0rc1__py3-none-any.whl
This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- prefect/__init__.py +8 -56
- prefect/_internal/compatibility/deprecated.py +6 -115
- prefect/_internal/compatibility/experimental.py +4 -79
- prefect/_internal/concurrency/api.py +0 -34
- prefect/_internal/concurrency/calls.py +0 -6
- prefect/_internal/concurrency/cancellation.py +0 -3
- prefect/_internal/concurrency/event_loop.py +0 -20
- prefect/_internal/concurrency/inspection.py +3 -3
- prefect/_internal/concurrency/threads.py +35 -0
- prefect/_internal/concurrency/waiters.py +0 -28
- prefect/_internal/pydantic/__init__.py +0 -45
- prefect/_internal/pydantic/v1_schema.py +21 -22
- prefect/_internal/pydantic/v2_schema.py +0 -2
- prefect/_internal/pydantic/v2_validated_func.py +18 -23
- prefect/_internal/schemas/bases.py +44 -177
- prefect/_internal/schemas/fields.py +1 -43
- prefect/_internal/schemas/validators.py +60 -158
- prefect/artifacts.py +161 -14
- prefect/automations.py +39 -4
- prefect/blocks/abstract.py +1 -1
- prefect/blocks/core.py +268 -148
- prefect/blocks/fields.py +2 -57
- prefect/blocks/kubernetes.py +8 -12
- prefect/blocks/notifications.py +40 -20
- prefect/blocks/system.py +22 -11
- prefect/blocks/webhook.py +2 -9
- prefect/client/base.py +4 -4
- prefect/client/cloud.py +8 -13
- prefect/client/orchestration.py +347 -341
- prefect/client/schemas/actions.py +92 -86
- prefect/client/schemas/filters.py +20 -40
- prefect/client/schemas/objects.py +147 -145
- prefect/client/schemas/responses.py +16 -24
- prefect/client/schemas/schedules.py +47 -35
- prefect/client/subscriptions.py +2 -2
- prefect/client/utilities.py +5 -2
- prefect/concurrency/asyncio.py +3 -1
- prefect/concurrency/events.py +1 -1
- prefect/concurrency/services.py +6 -3
- prefect/context.py +195 -27
- prefect/deployments/__init__.py +5 -6
- prefect/deployments/base.py +7 -5
- prefect/deployments/flow_runs.py +185 -0
- prefect/deployments/runner.py +50 -45
- prefect/deployments/schedules.py +28 -23
- prefect/deployments/steps/__init__.py +0 -1
- prefect/deployments/steps/core.py +1 -0
- prefect/deployments/steps/pull.py +7 -21
- prefect/engine.py +12 -2422
- prefect/events/actions.py +17 -23
- prefect/events/cli/automations.py +19 -6
- prefect/events/clients.py +14 -37
- prefect/events/filters.py +14 -18
- prefect/events/related.py +2 -2
- prefect/events/schemas/__init__.py +0 -5
- prefect/events/schemas/automations.py +55 -46
- prefect/events/schemas/deployment_triggers.py +7 -197
- prefect/events/schemas/events.py +34 -65
- prefect/events/schemas/labelling.py +10 -14
- prefect/events/utilities.py +2 -3
- prefect/events/worker.py +2 -3
- prefect/filesystems.py +6 -517
- prefect/{new_flow_engine.py → flow_engine.py} +313 -72
- prefect/flow_runs.py +377 -5
- prefect/flows.py +248 -165
- prefect/futures.py +186 -345
- prefect/infrastructure/__init__.py +0 -27
- prefect/infrastructure/provisioners/__init__.py +5 -3
- prefect/infrastructure/provisioners/cloud_run.py +11 -6
- prefect/infrastructure/provisioners/container_instance.py +11 -7
- prefect/infrastructure/provisioners/ecs.py +6 -4
- prefect/infrastructure/provisioners/modal.py +8 -5
- prefect/input/actions.py +2 -4
- prefect/input/run_input.py +5 -7
- prefect/logging/formatters.py +0 -2
- prefect/logging/handlers.py +3 -11
- prefect/logging/loggers.py +2 -2
- prefect/manifests.py +2 -1
- prefect/records/__init__.py +1 -0
- prefect/records/result_store.py +42 -0
- prefect/records/store.py +9 -0
- prefect/results.py +43 -39
- prefect/runner/runner.py +9 -9
- prefect/runner/server.py +6 -10
- prefect/runner/storage.py +3 -8
- prefect/runner/submit.py +2 -2
- prefect/runner/utils.py +2 -2
- prefect/serializers.py +24 -35
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
- prefect/settings.py +70 -133
- prefect/states.py +17 -47
- prefect/task_engine.py +697 -58
- prefect/task_runners.py +269 -301
- prefect/task_server.py +53 -34
- prefect/tasks.py +327 -337
- prefect/transactions.py +220 -0
- prefect/types/__init__.py +61 -82
- prefect/utilities/asyncutils.py +195 -136
- prefect/utilities/callables.py +121 -41
- prefect/utilities/collections.py +23 -38
- prefect/utilities/dispatch.py +11 -3
- prefect/utilities/dockerutils.py +4 -0
- prefect/utilities/engine.py +140 -20
- prefect/utilities/importtools.py +26 -27
- prefect/utilities/pydantic.py +128 -38
- prefect/utilities/schema_tools/hydration.py +5 -1
- prefect/utilities/templating.py +12 -2
- prefect/variables.py +78 -61
- prefect/workers/__init__.py +0 -1
- prefect/workers/base.py +15 -17
- prefect/workers/process.py +3 -8
- prefect/workers/server.py +2 -2
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/METADATA +22 -21
- prefect_client-3.0.0rc1.dist-info/RECORD +176 -0
- prefect/_internal/pydantic/_base_model.py +0 -51
- prefect/_internal/pydantic/_compat.py +0 -82
- prefect/_internal/pydantic/_flags.py +0 -20
- prefect/_internal/pydantic/_types.py +0 -8
- prefect/_internal/pydantic/utilities/__init__.py +0 -0
- prefect/_internal/pydantic/utilities/config_dict.py +0 -72
- prefect/_internal/pydantic/utilities/field_validator.py +0 -150
- prefect/_internal/pydantic/utilities/model_construct.py +0 -56
- prefect/_internal/pydantic/utilities/model_copy.py +0 -55
- prefect/_internal/pydantic/utilities/model_dump.py +0 -136
- prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
- prefect/_internal/pydantic/utilities/model_fields.py +0 -50
- prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
- prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
- prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
- prefect/_internal/pydantic/utilities/model_validate.py +0 -75
- prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
- prefect/_internal/pydantic/utilities/model_validator.py +0 -87
- prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
- prefect/_vendor/__init__.py +0 -0
- prefect/_vendor/fastapi/__init__.py +0 -25
- prefect/_vendor/fastapi/applications.py +0 -946
- prefect/_vendor/fastapi/background.py +0 -3
- prefect/_vendor/fastapi/concurrency.py +0 -44
- prefect/_vendor/fastapi/datastructures.py +0 -58
- prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
- prefect/_vendor/fastapi/dependencies/models.py +0 -64
- prefect/_vendor/fastapi/dependencies/utils.py +0 -877
- prefect/_vendor/fastapi/encoders.py +0 -177
- prefect/_vendor/fastapi/exception_handlers.py +0 -40
- prefect/_vendor/fastapi/exceptions.py +0 -46
- prefect/_vendor/fastapi/logger.py +0 -3
- prefect/_vendor/fastapi/middleware/__init__.py +0 -1
- prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
- prefect/_vendor/fastapi/middleware/cors.py +0 -3
- prefect/_vendor/fastapi/middleware/gzip.py +0 -3
- prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
- prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
- prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
- prefect/_vendor/fastapi/openapi/__init__.py +0 -0
- prefect/_vendor/fastapi/openapi/constants.py +0 -2
- prefect/_vendor/fastapi/openapi/docs.py +0 -203
- prefect/_vendor/fastapi/openapi/models.py +0 -480
- prefect/_vendor/fastapi/openapi/utils.py +0 -485
- prefect/_vendor/fastapi/param_functions.py +0 -340
- prefect/_vendor/fastapi/params.py +0 -453
- prefect/_vendor/fastapi/requests.py +0 -4
- prefect/_vendor/fastapi/responses.py +0 -40
- prefect/_vendor/fastapi/routing.py +0 -1331
- prefect/_vendor/fastapi/security/__init__.py +0 -15
- prefect/_vendor/fastapi/security/api_key.py +0 -98
- prefect/_vendor/fastapi/security/base.py +0 -6
- prefect/_vendor/fastapi/security/http.py +0 -172
- prefect/_vendor/fastapi/security/oauth2.py +0 -227
- prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
- prefect/_vendor/fastapi/security/utils.py +0 -10
- prefect/_vendor/fastapi/staticfiles.py +0 -1
- prefect/_vendor/fastapi/templating.py +0 -3
- prefect/_vendor/fastapi/testclient.py +0 -1
- prefect/_vendor/fastapi/types.py +0 -3
- prefect/_vendor/fastapi/utils.py +0 -235
- prefect/_vendor/fastapi/websockets.py +0 -7
- prefect/_vendor/starlette/__init__.py +0 -1
- prefect/_vendor/starlette/_compat.py +0 -28
- prefect/_vendor/starlette/_exception_handler.py +0 -80
- prefect/_vendor/starlette/_utils.py +0 -88
- prefect/_vendor/starlette/applications.py +0 -261
- prefect/_vendor/starlette/authentication.py +0 -159
- prefect/_vendor/starlette/background.py +0 -43
- prefect/_vendor/starlette/concurrency.py +0 -59
- prefect/_vendor/starlette/config.py +0 -151
- prefect/_vendor/starlette/convertors.py +0 -87
- prefect/_vendor/starlette/datastructures.py +0 -707
- prefect/_vendor/starlette/endpoints.py +0 -130
- prefect/_vendor/starlette/exceptions.py +0 -60
- prefect/_vendor/starlette/formparsers.py +0 -276
- prefect/_vendor/starlette/middleware/__init__.py +0 -17
- prefect/_vendor/starlette/middleware/authentication.py +0 -52
- prefect/_vendor/starlette/middleware/base.py +0 -220
- prefect/_vendor/starlette/middleware/cors.py +0 -176
- prefect/_vendor/starlette/middleware/errors.py +0 -265
- prefect/_vendor/starlette/middleware/exceptions.py +0 -74
- prefect/_vendor/starlette/middleware/gzip.py +0 -113
- prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
- prefect/_vendor/starlette/middleware/sessions.py +0 -82
- prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
- prefect/_vendor/starlette/middleware/wsgi.py +0 -147
- prefect/_vendor/starlette/requests.py +0 -328
- prefect/_vendor/starlette/responses.py +0 -347
- prefect/_vendor/starlette/routing.py +0 -933
- prefect/_vendor/starlette/schemas.py +0 -154
- prefect/_vendor/starlette/staticfiles.py +0 -248
- prefect/_vendor/starlette/status.py +0 -199
- prefect/_vendor/starlette/templating.py +0 -231
- prefect/_vendor/starlette/testclient.py +0 -804
- prefect/_vendor/starlette/types.py +0 -30
- prefect/_vendor/starlette/websockets.py +0 -193
- prefect/agent.py +0 -698
- prefect/deployments/deployments.py +0 -1042
- prefect/deprecated/__init__.py +0 -0
- prefect/deprecated/data_documents.py +0 -350
- prefect/deprecated/packaging/__init__.py +0 -12
- prefect/deprecated/packaging/base.py +0 -96
- prefect/deprecated/packaging/docker.py +0 -146
- prefect/deprecated/packaging/file.py +0 -92
- prefect/deprecated/packaging/orion.py +0 -80
- prefect/deprecated/packaging/serializers.py +0 -171
- prefect/events/instrument.py +0 -135
- prefect/infrastructure/base.py +0 -323
- prefect/infrastructure/container.py +0 -818
- prefect/infrastructure/kubernetes.py +0 -920
- prefect/infrastructure/process.py +0 -289
- prefect/new_task_engine.py +0 -423
- prefect/pydantic/__init__.py +0 -76
- prefect/pydantic/main.py +0 -39
- prefect/software/__init__.py +0 -2
- prefect/software/base.py +0 -50
- prefect/software/conda.py +0 -199
- prefect/software/pip.py +0 -122
- prefect/software/python.py +0 -52
- prefect/workers/block.py +0 -218
- prefect_client-2.19.3.dist-info/RECORD +0 -292
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/LICENSE +0 -0
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/WHEEL +0 -0
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/top_level.txt +0 -0
prefect/context.py
CHANGED
@@ -1,4 +1,4 @@
-
+"""
 Async and thread safe models for passing runtime context data.
 
 These contexts should never be directly mutated by the user.
@@ -10,7 +10,7 @@ import os
 import sys
 import warnings
 from collections import defaultdict
-from contextlib import contextmanager
+from contextlib import ExitStack, contextmanager
 from contextvars import ContextVar, Token
 from functools import update_wrapper
 from pathlib import Path
@@ -29,21 +29,19 @@ from typing import (
     Union,
 )
 
+import anyio
+import anyio._backends._asyncio
 import anyio.abc
 import pendulum
-
-from
-
-
-    from pydantic.v1 import BaseModel, Field, PrivateAttr
-else:
-    from pydantic import BaseModel, Field, PrivateAttr
+from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
+from pydantic_extra_types.pendulum_dt import DateTime
+from sniffio import AsyncLibraryNotFoundError
+from typing_extensions import Self
 
 import prefect.logging
 import prefect.logging.configuration
 import prefect.settings
-from prefect.
-from prefect.client.orchestration import PrefectClient, SyncPrefectClient
+from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
 from prefect.client.schemas import FlowRun, TaskRun
 from prefect.events.worker import EventsWorker
 from prefect.exceptions import MissingContextError
@@ -51,7 +49,8 @@ from prefect.futures import PrefectFuture
 from prefect.results import ResultFactory
 from prefect.settings import PREFECT_HOME, Profile, Settings
 from prefect.states import State
-from prefect.task_runners import
+from prefect.task_runners import TaskRunner
+from prefect.utilities.asyncutils import run_coro_as_sync
 from prefect.utilities.importtools import load_script_as_module
 
 T = TypeVar("T")
@@ -63,7 +62,71 @@ if TYPE_CHECKING:
 # Define the global settings context variable
 # This will be populated downstream but must be null here to facilitate loading the
 # default settings.
-GLOBAL_SETTINGS_CONTEXT = None
+GLOBAL_SETTINGS_CONTEXT = None  # type: ignore
+
+
+def serialize_context() -> Dict[str, Any]:
+    """
+    Serialize the current context for use in a remote execution environment.
+    """
+
+    flow_run_context = EngineContext.get()
+    task_run_context = TaskRunContext.get()
+    tags_context = TagsContext.get()
+    settings_context = SettingsContext.get()
+
+    return {
+        "flow_run_context": flow_run_context.serialize() if flow_run_context else {},
+        "task_run_context": task_run_context.serialize() if task_run_context else {},
+        "tags_context": tags_context.serialize() if tags_context else {},
+        "settings_context": settings_context.serialize() if settings_context else {},
+    }
+
+
+@contextmanager
+def hydrated_context(
+    serialized_context: Optional[Dict[str, Any]] = None,
+    client: Union[PrefectClient, SyncPrefectClient, None] = None,
+):
+    with ExitStack() as stack:
+        if serialized_context:
+            # Set up settings context
+            if settings_context := serialized_context.get("settings_context"):
+                stack.enter_context(SettingsContext(**settings_context))
+            # Set up parent flow run context
+            # TODO: This task group isn't necessary in the new engine. Remove the background tasks
+            # attribute from FlowRunContext.
+            client = client or get_client(sync_client=True)
+            if flow_run_context := serialized_context.get("flow_run_context"):
+                try:
+                    task_group = anyio.create_task_group()
+                except AsyncLibraryNotFoundError:
+                    task_group = anyio._backends._asyncio.TaskGroup()
+                flow = flow_run_context["flow"]
+                flow_run_context = FlowRunContext(
+                    **flow_run_context,
+                    client=client,
+                    background_tasks=task_group,
+                    result_factory=run_coro_as_sync(ResultFactory.from_flow(flow)),
+                    task_runner=flow.task_runner.duplicate(),
+                    detached=True,
+                )
+                stack.enter_context(flow_run_context)
+            # Set up parent task run context
+            if parent_task_run_context := serialized_context.get("task_run_context"):
+                parent_task = parent_task_run_context["task"]
+                task_run_context = TaskRunContext(
+                    **parent_task_run_context,
+                    client=client,
+                    result_factory=run_coro_as_sync(
+                        ResultFactory.from_autonomous_task(parent_task)
+                    ),
+                )
+                stack.enter_context(task_run_context)
+            # Set up tags context
+            if tags_context := serialized_context.get("tags_context"):
+                stack.enter_context(tags(*tags_context["current_tags"]))
+        yield
 
 
 class ContextModel(BaseModel):
@@ -74,12 +137,11 @@ class ContextModel(BaseModel):
 
     # The context variable for storing data must be defined by the child class
     __var__: ContextVar
-    _token: Token = PrivateAttr(None)
-
-
-
-
-        extra = "forbid"
+    _token: Optional[Token] = PrivateAttr(None)
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        extra="forbid",
+    )
 
     def __enter__(self):
         if self._token is not None:
@@ -98,10 +160,13 @@ class ContextModel(BaseModel):
         self._token = None
 
     @classmethod
-    def get(cls: Type[
+    def get(cls: Type[Self]) -> Optional[Self]:
+        """Get the current context instance"""
         return cls.__var__.get(None)
 
-    def
+    def model_copy(
+        self: Self, *, update: Optional[Dict[str, Any]] = None, deep: bool = False
+    ):
         """
         Duplicate the context model, optionally choosing which fields to include, exclude, or change.
 
@@ -115,11 +180,17 @@ class ContextModel(BaseModel):
         Returns:
            A new model instance.
        """
+        new = super().model_copy(update=update, deep=deep)
         # Remove the token on copy to avoid re-entrance errors
-        new = super().copy(**kwargs)
         new._token = None
         return new
 
+    def serialize(self) -> Dict[str, Any]:
+        """
+        Serialize the context model to a dictionary that can be pickled with cloudpickle.
+        """
+        return self.model_dump(exclude_unset=True)
+
 
 class PrefectObjectRegistry(ContextModel):
     """
@@ -132,7 +203,7 @@ class PrefectObjectRegistry(ContextModel):
        capture_failures: If set, failures during __init__ will be silenced and tracked.
    """
 
-    start_time:
+    start_time: DateTime = Field(default_factory=lambda: pendulum.now("UTC"))
 
     _instance_registry: Dict[Type[T], List[T]] = PrivateAttr(
         default_factory=lambda: defaultdict(list)
@@ -201,6 +272,67 @@ class PrefectObjectRegistry(ContextModel):
         return type_
 
 
+class ClientContext(ContextModel):
+    """
+    A context for managing the Prefect client instances.
+
+    Clients were formerly tracked on the TaskRunContext and FlowRunContext, but
+    having two separate places and the addition of both sync and async clients
+    made it difficult to manage. This context is intended to be the single
+    source for clients.
+
+    The client creates both sync and async clients, which can either be read
+    directly from the context object OR loaded with get_client, inject_client,
+    or other Prefect utilities.
+
+    with ClientContext.get_or_create() as ctx:
+        c1 = get_client(sync_client=True)
+        c2 = get_client(sync_client=True)
+        assert c1 is c2
+        assert c1 is ctx.sync_client
+    """
+
+    __var__ = ContextVar("clients")
+    sync_client: SyncPrefectClient
+    async_client: PrefectClient
+    _httpx_settings: Optional[dict[str, Any]] = PrivateAttr(None)
+    _context_stack: int = PrivateAttr(0)
+
+    def __init__(self, httpx_settings: Optional[dict[str, Any]] = None):
+        super().__init__(
+            sync_client=get_client(sync_client=True, httpx_settings=httpx_settings),
+            async_client=get_client(sync_client=False, httpx_settings=httpx_settings),
+        )
+        self._httpx_settings = httpx_settings
+        self._context_stack = 0
+
+    def __enter__(self):
+        self._context_stack += 1
+        if self._context_stack == 1:
+            self.sync_client.__enter__()
+            run_coro_as_sync(self.async_client.__aenter__())
+            return super().__enter__()
+        else:
+            return self
+
+    def __exit__(self, *exc_info):
+        self._context_stack -= 1
+        if self._context_stack == 0:
+            self.sync_client.__exit__(*exc_info)
+            run_coro_as_sync(self.async_client.__aexit__(*exc_info))
+            return super().__exit__(*exc_info)
+
+    @classmethod
+    @contextmanager
+    def get_or_create(cls) -> Generator["ClientContext", None, None]:
+        ctx = ClientContext.get()
+        if ctx:
+            yield ctx
+        else:
+            with ClientContext() as ctx:
+                yield ctx
+
+
 class RunContext(ContextModel):
     """
     The base context for a flow or task run. Data in this context will always be
@@ -211,10 +343,16 @@ class RunContext(ContextModel):
        client: The Prefect client instance being used for API communication
    """
 
-    start_time:
+    start_time: DateTime = Field(default_factory=lambda: pendulum.now("UTC"))
     input_keyset: Optional[Dict[str, Dict[str, str]]] = None
     client: Union[PrefectClient, SyncPrefectClient]
 
+    def serialize(self):
+        return self.model_dump(
+            include={"start_time", "input_keyset"},
+            exclude_unset=True,
+        )
+
 
 class EngineContext(RunContext):
     """
@@ -236,10 +374,14 @@ class EngineContext(RunContext):
     flow: Optional["Flow"] = None
     flow_run: Optional[FlowRun] = None
     autonomous_task_run: Optional[TaskRun] = None
-    task_runner:
+    task_runner: TaskRunner
     log_prints: bool = False
     parameters: Optional[Dict[str, Any]] = None
 
+    # Flag signaling if the flow run context has been serialized and sent
+    # to remote infrastructure.
+    detached: bool = False
+
     # Result handling
     result_factory: ResultFactory
 
@@ -266,7 +408,20 @@ class EngineContext(RunContext):
     # Events worker to emit events to Prefect Cloud
     events: Optional[EventsWorker] = None
 
-    __var__ = ContextVar("flow_run")
+    __var__: ContextVar = ContextVar("flow_run")
+
+    def serialize(self):
+        return self.model_dump(
+            include={
+                "flow_run",
+                "flow",
+                "parameters",
+                "log_prints",
+                "start_time",
+                "input_keyset",
+            },
+            exclude_unset=True,
+        )
 
 
 FlowRunContext = EngineContext  # for backwards compatibility
@@ -292,6 +447,19 @@ class TaskRunContext(RunContext):
 
     __var__ = ContextVar("task_run")
 
+    def serialize(self):
+        return self.model_dump(
+            include={
+                "task_run",
+                "task",
+                "parameters",
+                "log_prints",
+                "start_time",
+                "input_keyset",
+            },
+            exclude_unset=True,
+        )
+
 
 class TagsContext(ContextModel):
     """
@@ -569,7 +737,7 @@ def root_settings_context():
 
 
 GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context()
-GLOBAL_OBJECT_REGISTRY: ContextManager[PrefectObjectRegistry] = None
+GLOBAL_OBJECT_REGISTRY: Optional[ContextManager[PrefectObjectRegistry]] = None
 
 
 def initialize_object_registry():
prefect/deployments/__init__.py
CHANGED
@@ -4,15 +4,14 @@ from prefect.deployments.base import (
     initialize_project,
 )
 
-from prefect.deployments.deployments import (
-    run_deployment,
-    load_flow_from_flow_run,
-    load_deployments_from_yaml,
-    Deployment,
-)
 from prefect.deployments.runner import (
     RunnerDeployment,
     deploy,
     DeploymentImage,
     EntrypointType,
 )
+
+
+from prefect.deployments.flow_runs import (
+    run_deployment,
+)
prefect/deployments/base.py
CHANGED
@@ -19,7 +19,7 @@ import anyio
 import yaml
 from ruamel.yaml import YAML
 
-from prefect.client.schemas.
+from prefect.client.schemas.actions import DeploymentScheduleCreate
 from prefect.client.schemas.schedules import IntervalSchedule
 from prefect.logging import get_logger
 from prefect.settings import PREFECT_DEBUG_MODE
@@ -260,7 +260,7 @@ def _format_deployment_for_saving_to_prefect_file(
     if isinstance(deployment["schedule"], IntervalSchedule):
         deployment["schedule"] = _interval_schedule_to_dict(deployment["schedule"])
     else:  # all valid SCHEDULE_TYPES are subclasses of BaseModel
-        deployment["schedule"] = deployment["schedule"].
+        deployment["schedule"] = deployment["schedule"].model_dump()
 
     if "is_schedule_active" in deployment:
         deployment["schedule"]["active"] = deployment.pop("is_schedule_active")
@@ -268,16 +268,18 @@ def _format_deployment_for_saving_to_prefect_file(
     if deployment.get("schedules"):
         schedules = []
         for deployment_schedule in cast(
-            List[
+            List[DeploymentScheduleCreate], deployment["schedules"]
         ):
             if isinstance(deployment_schedule.schedule, IntervalSchedule):
                 schedule_config = _interval_schedule_to_dict(
                     deployment_schedule.schedule
                 )
             else:  # all valid SCHEDULE_TYPES are subclasses of BaseModel
-                schedule_config = deployment_schedule.schedule.
+                schedule_config = deployment_schedule.schedule.model_dump()
 
             schedule_config["active"] = deployment_schedule.active
+            schedule_config["max_active_runs"] = deployment_schedule.max_active_runs
+            schedule_config["catchup"] = deployment_schedule.catchup
             schedules.append(schedule_config)
 
         deployment["schedules"] = schedules
@@ -295,7 +297,7 @@ def _interval_schedule_to_dict(schedule: IntervalSchedule) -> Dict:
     Returns:
         - Dict: the schedule as a dictionary
     """
-    schedule_config = schedule.
+    schedule_config = schedule.model_dump()
     schedule_config["interval"] = schedule_config["interval"].total_seconds()
     schedule_config["anchor_date"] = schedule_config["anchor_date"].isoformat()
 
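
For orientation, the sketch below mirrors what these updated helpers now do when writing a schedule to a prefect.yaml file: schedule models are serialized with model_dump(), and each schedule entry gains max_active_runs and catchup keys. The values shown are illustrative, not taken from the diff:

    from datetime import timedelta

    import pendulum

    from prefect.client.schemas.schedules import IntervalSchedule

    schedule = IntervalSchedule(
        interval=timedelta(hours=1),
        anchor_date=pendulum.datetime(2024, 1, 1, tz="UTC"),
    )
    config = schedule.model_dump()
    config["interval"] = config["interval"].total_seconds()    # 3600.0
    config["anchor_date"] = config["anchor_date"].isoformat()  # "2024-01-01T00:00:00+00:00"
    # _format_deployment_for_saving_to_prefect_file then layers on per-schedule fields:
    config["active"] = True
    config["max_active_runs"] = None
    config["catchup"] = False
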
prefect/deployments/flow_runs.py
ADDED
@@ -0,0 +1,185 @@
+from datetime import datetime
+from typing import TYPE_CHECKING, Iterable, Optional, Union
+from uuid import UUID
+
+import anyio
+import pendulum
+
+from prefect._internal.compatibility.deprecated import deprecated_parameter
+from prefect.client.schemas import FlowRun
+from prefect.client.utilities import inject_client
+from prefect.context import FlowRunContext, TaskRunContext
+from prefect.logging import get_logger
+from prefect.states import Pending, Scheduled
+from prefect.tasks import Task
+from prefect.utilities.asyncutils import sync_compatible
+from prefect.utilities.slugify import slugify
+
+if TYPE_CHECKING:
+    from prefect.client.orchestration import PrefectClient
+    from prefect.client.schemas.objects import FlowRun
+
+
+logger = get_logger(__name__)
+
+
+@sync_compatible
+@deprecated_parameter(
+    "infra_overrides",
+    start_date="Apr 2024",
+    help="Use `job_variables` instead.",
+)
+@inject_client
+async def run_deployment(
+    name: Union[str, UUID],
+    client: Optional["PrefectClient"] = None,
+    parameters: Optional[dict] = None,
+    scheduled_time: Optional[datetime] = None,
+    flow_run_name: Optional[str] = None,
+    timeout: Optional[float] = None,
+    poll_interval: Optional[float] = 5,
+    tags: Optional[Iterable[str]] = None,
+    idempotency_key: Optional[str] = None,
+    work_queue_name: Optional[str] = None,
+    as_subflow: Optional[bool] = True,
+    infra_overrides: Optional[dict] = None,
+    job_variables: Optional[dict] = None,
+) -> "FlowRun":
+    """
+    Create a flow run for a deployment and return it after completion or a timeout.
+
+    By default, this function blocks until the flow run finishes executing.
+    Specify a timeout (in seconds) to wait for the flow run to execute before
+    returning flow run metadata. To return immediately, without waiting for the
+    flow run to execute, set `timeout=0`.
+
+    Note that if you specify a timeout, this function will return the flow run
+    metadata whether or not the flow run finished executing.
+
+    If called within a flow or task, the flow run this function creates will
+    be linked to the current flow run as a subflow. Disable this behavior by
+    passing `as_subflow=False`.
+
+    Args:
+        name: The deployment id or deployment name in the form:
+            `"flow name/deployment name"`
+        parameters: Parameter overrides for this flow run. Merged with the deployment
+            defaults.
+        scheduled_time: The time to schedule the flow run for, defaults to scheduling
+            the flow run to start now.
+        flow_run_name: A name for the created flow run
+        timeout: The amount of time to wait (in seconds) for the flow run to
+            complete before returning. Setting `timeout` to 0 will return the flow
+            run metadata immediately. Setting `timeout` to None will allow this
+            function to poll indefinitely. Defaults to None.
+        poll_interval: The number of seconds between polls
+        tags: A list of tags to associate with this flow run; tags can be used in
+            automations and for organizational purposes.
+        idempotency_key: A unique value to recognize retries of the same run, and
+            prevent creating multiple flow runs.
+        work_queue_name: The name of a work queue to use for this run. Defaults to
+            the default work queue for the deployment.
+        as_subflow: Whether to link the flow run as a subflow of the current
+            flow or task run.
+        job_variables: A dictionary of dot delimited infrastructure overrides that
+            will be applied at runtime; for example `env.CONFIG_KEY=config_value` or
+            `namespace='prefect'`
+    """
+    if timeout is not None and timeout < 0:
+        raise ValueError("`timeout` cannot be negative")
+
+    if scheduled_time is None:
+        scheduled_time = pendulum.now("UTC")
+
+    parameters = parameters or {}
+
+    deployment_id = None
+
+    if isinstance(name, UUID):
+        deployment_id = name
+    else:
+        try:
+            deployment_id = UUID(name)
+        except ValueError:
+            pass
+
+    if deployment_id:
+        deployment = await client.read_deployment(deployment_id=deployment_id)
+    else:
+        deployment = await client.read_deployment_by_name(name)
+
+    flow_run_ctx = FlowRunContext.get()
+    task_run_ctx = TaskRunContext.get()
+    if as_subflow and (flow_run_ctx or task_run_ctx):
+        # TODO: this logic can likely be simplified by using `Task.create_run`
+        from prefect.utilities.engine import (
+            _dynamic_key_for_task_run,
+            collect_task_run_inputs,
+        )
+
+        # This was called from a flow. Link the flow run as a subflow.
+        task_inputs = {
+            k: await collect_task_run_inputs(v) for k, v in parameters.items()
+        }
+
+        if deployment_id:
+            flow = await client.read_flow(deployment.flow_id)
+            deployment_name = f"{flow.name}/{deployment.name}"
+        else:
+            deployment_name = name
+
+        # Generate a task in the parent flow run to represent the result of the subflow
+        dummy_task = Task(
+            name=deployment_name,
+            fn=lambda: None,
+            version=deployment.version,
+        )
+        # Override the default task key to include the deployment name
+        dummy_task.task_key = f"{__name__}.run_deployment.{slugify(deployment_name)}"
+        flow_run_id = (
+            flow_run_ctx.flow_run.id
+            if flow_run_ctx
+            else task_run_ctx.task_run.flow_run_id
+        )
+        dynamic_key = (
+            _dynamic_key_for_task_run(flow_run_ctx, dummy_task)
+            if flow_run_ctx
+            else task_run_ctx.task_run.dynamic_key
+        )
+        parent_task_run = await client.create_task_run(
+            task=dummy_task,
+            flow_run_id=flow_run_id,
+            dynamic_key=dynamic_key,
+            task_inputs=task_inputs,
+            state=Pending(),
+        )
+        parent_task_run_id = parent_task_run.id
+    else:
+        parent_task_run_id = None
+
+    flow_run = await client.create_flow_run_from_deployment(
+        deployment.id,
+        parameters=parameters,
+        state=Scheduled(scheduled_time=scheduled_time),
+        name=flow_run_name,
+        tags=tags,
+        idempotency_key=idempotency_key,
+        parent_task_run_id=parent_task_run_id,
+        work_queue_name=work_queue_name,
+        job_variables=job_variables,
+    )
+
+    flow_run_id = flow_run.id
+
+    if timeout == 0:
+        return flow_run
+
+    with anyio.move_on_after(timeout):
+        while True:
+            flow_run = await client.read_flow_run(flow_run_id)
+            flow_state = flow_run.state
+            if flow_state and flow_state.is_final():
+                return flow_run
+            await anyio.sleep(poll_interval)
+
+    return flow_run