prefect-client 2.19.3__py3-none-any.whl → 3.0.0rc1__py3-none-any.whl
This diff compares publicly released versions of the package as published to their respective public registries; it is provided for informational purposes only.
- prefect/__init__.py +8 -56
- prefect/_internal/compatibility/deprecated.py +6 -115
- prefect/_internal/compatibility/experimental.py +4 -79
- prefect/_internal/concurrency/api.py +0 -34
- prefect/_internal/concurrency/calls.py +0 -6
- prefect/_internal/concurrency/cancellation.py +0 -3
- prefect/_internal/concurrency/event_loop.py +0 -20
- prefect/_internal/concurrency/inspection.py +3 -3
- prefect/_internal/concurrency/threads.py +35 -0
- prefect/_internal/concurrency/waiters.py +0 -28
- prefect/_internal/pydantic/__init__.py +0 -45
- prefect/_internal/pydantic/v1_schema.py +21 -22
- prefect/_internal/pydantic/v2_schema.py +0 -2
- prefect/_internal/pydantic/v2_validated_func.py +18 -23
- prefect/_internal/schemas/bases.py +44 -177
- prefect/_internal/schemas/fields.py +1 -43
- prefect/_internal/schemas/validators.py +60 -158
- prefect/artifacts.py +161 -14
- prefect/automations.py +39 -4
- prefect/blocks/abstract.py +1 -1
- prefect/blocks/core.py +268 -148
- prefect/blocks/fields.py +2 -57
- prefect/blocks/kubernetes.py +8 -12
- prefect/blocks/notifications.py +40 -20
- prefect/blocks/system.py +22 -11
- prefect/blocks/webhook.py +2 -9
- prefect/client/base.py +4 -4
- prefect/client/cloud.py +8 -13
- prefect/client/orchestration.py +347 -341
- prefect/client/schemas/actions.py +92 -86
- prefect/client/schemas/filters.py +20 -40
- prefect/client/schemas/objects.py +147 -145
- prefect/client/schemas/responses.py +16 -24
- prefect/client/schemas/schedules.py +47 -35
- prefect/client/subscriptions.py +2 -2
- prefect/client/utilities.py +5 -2
- prefect/concurrency/asyncio.py +3 -1
- prefect/concurrency/events.py +1 -1
- prefect/concurrency/services.py +6 -3
- prefect/context.py +195 -27
- prefect/deployments/__init__.py +5 -6
- prefect/deployments/base.py +7 -5
- prefect/deployments/flow_runs.py +185 -0
- prefect/deployments/runner.py +50 -45
- prefect/deployments/schedules.py +28 -23
- prefect/deployments/steps/__init__.py +0 -1
- prefect/deployments/steps/core.py +1 -0
- prefect/deployments/steps/pull.py +7 -21
- prefect/engine.py +12 -2422
- prefect/events/actions.py +17 -23
- prefect/events/cli/automations.py +19 -6
- prefect/events/clients.py +14 -37
- prefect/events/filters.py +14 -18
- prefect/events/related.py +2 -2
- prefect/events/schemas/__init__.py +0 -5
- prefect/events/schemas/automations.py +55 -46
- prefect/events/schemas/deployment_triggers.py +7 -197
- prefect/events/schemas/events.py +34 -65
- prefect/events/schemas/labelling.py +10 -14
- prefect/events/utilities.py +2 -3
- prefect/events/worker.py +2 -3
- prefect/filesystems.py +6 -517
- prefect/{new_flow_engine.py → flow_engine.py} +313 -72
- prefect/flow_runs.py +377 -5
- prefect/flows.py +248 -165
- prefect/futures.py +186 -345
- prefect/infrastructure/__init__.py +0 -27
- prefect/infrastructure/provisioners/__init__.py +5 -3
- prefect/infrastructure/provisioners/cloud_run.py +11 -6
- prefect/infrastructure/provisioners/container_instance.py +11 -7
- prefect/infrastructure/provisioners/ecs.py +6 -4
- prefect/infrastructure/provisioners/modal.py +8 -5
- prefect/input/actions.py +2 -4
- prefect/input/run_input.py +5 -7
- prefect/logging/formatters.py +0 -2
- prefect/logging/handlers.py +3 -11
- prefect/logging/loggers.py +2 -2
- prefect/manifests.py +2 -1
- prefect/records/__init__.py +1 -0
- prefect/records/result_store.py +42 -0
- prefect/records/store.py +9 -0
- prefect/results.py +43 -39
- prefect/runner/runner.py +9 -9
- prefect/runner/server.py +6 -10
- prefect/runner/storage.py +3 -8
- prefect/runner/submit.py +2 -2
- prefect/runner/utils.py +2 -2
- prefect/serializers.py +24 -35
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
- prefect/settings.py +70 -133
- prefect/states.py +17 -47
- prefect/task_engine.py +697 -58
- prefect/task_runners.py +269 -301
- prefect/task_server.py +53 -34
- prefect/tasks.py +327 -337
- prefect/transactions.py +220 -0
- prefect/types/__init__.py +61 -82
- prefect/utilities/asyncutils.py +195 -136
- prefect/utilities/callables.py +121 -41
- prefect/utilities/collections.py +23 -38
- prefect/utilities/dispatch.py +11 -3
- prefect/utilities/dockerutils.py +4 -0
- prefect/utilities/engine.py +140 -20
- prefect/utilities/importtools.py +26 -27
- prefect/utilities/pydantic.py +128 -38
- prefect/utilities/schema_tools/hydration.py +5 -1
- prefect/utilities/templating.py +12 -2
- prefect/variables.py +78 -61
- prefect/workers/__init__.py +0 -1
- prefect/workers/base.py +15 -17
- prefect/workers/process.py +3 -8
- prefect/workers/server.py +2 -2
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/METADATA +22 -21
- prefect_client-3.0.0rc1.dist-info/RECORD +176 -0
- prefect/_internal/pydantic/_base_model.py +0 -51
- prefect/_internal/pydantic/_compat.py +0 -82
- prefect/_internal/pydantic/_flags.py +0 -20
- prefect/_internal/pydantic/_types.py +0 -8
- prefect/_internal/pydantic/utilities/__init__.py +0 -0
- prefect/_internal/pydantic/utilities/config_dict.py +0 -72
- prefect/_internal/pydantic/utilities/field_validator.py +0 -150
- prefect/_internal/pydantic/utilities/model_construct.py +0 -56
- prefect/_internal/pydantic/utilities/model_copy.py +0 -55
- prefect/_internal/pydantic/utilities/model_dump.py +0 -136
- prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
- prefect/_internal/pydantic/utilities/model_fields.py +0 -50
- prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
- prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
- prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
- prefect/_internal/pydantic/utilities/model_validate.py +0 -75
- prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
- prefect/_internal/pydantic/utilities/model_validator.py +0 -87
- prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
- prefect/_vendor/__init__.py +0 -0
- prefect/_vendor/fastapi/__init__.py +0 -25
- prefect/_vendor/fastapi/applications.py +0 -946
- prefect/_vendor/fastapi/background.py +0 -3
- prefect/_vendor/fastapi/concurrency.py +0 -44
- prefect/_vendor/fastapi/datastructures.py +0 -58
- prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
- prefect/_vendor/fastapi/dependencies/models.py +0 -64
- prefect/_vendor/fastapi/dependencies/utils.py +0 -877
- prefect/_vendor/fastapi/encoders.py +0 -177
- prefect/_vendor/fastapi/exception_handlers.py +0 -40
- prefect/_vendor/fastapi/exceptions.py +0 -46
- prefect/_vendor/fastapi/logger.py +0 -3
- prefect/_vendor/fastapi/middleware/__init__.py +0 -1
- prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
- prefect/_vendor/fastapi/middleware/cors.py +0 -3
- prefect/_vendor/fastapi/middleware/gzip.py +0 -3
- prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
- prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
- prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
- prefect/_vendor/fastapi/openapi/__init__.py +0 -0
- prefect/_vendor/fastapi/openapi/constants.py +0 -2
- prefect/_vendor/fastapi/openapi/docs.py +0 -203
- prefect/_vendor/fastapi/openapi/models.py +0 -480
- prefect/_vendor/fastapi/openapi/utils.py +0 -485
- prefect/_vendor/fastapi/param_functions.py +0 -340
- prefect/_vendor/fastapi/params.py +0 -453
- prefect/_vendor/fastapi/requests.py +0 -4
- prefect/_vendor/fastapi/responses.py +0 -40
- prefect/_vendor/fastapi/routing.py +0 -1331
- prefect/_vendor/fastapi/security/__init__.py +0 -15
- prefect/_vendor/fastapi/security/api_key.py +0 -98
- prefect/_vendor/fastapi/security/base.py +0 -6
- prefect/_vendor/fastapi/security/http.py +0 -172
- prefect/_vendor/fastapi/security/oauth2.py +0 -227
- prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
- prefect/_vendor/fastapi/security/utils.py +0 -10
- prefect/_vendor/fastapi/staticfiles.py +0 -1
- prefect/_vendor/fastapi/templating.py +0 -3
- prefect/_vendor/fastapi/testclient.py +0 -1
- prefect/_vendor/fastapi/types.py +0 -3
- prefect/_vendor/fastapi/utils.py +0 -235
- prefect/_vendor/fastapi/websockets.py +0 -7
- prefect/_vendor/starlette/__init__.py +0 -1
- prefect/_vendor/starlette/_compat.py +0 -28
- prefect/_vendor/starlette/_exception_handler.py +0 -80
- prefect/_vendor/starlette/_utils.py +0 -88
- prefect/_vendor/starlette/applications.py +0 -261
- prefect/_vendor/starlette/authentication.py +0 -159
- prefect/_vendor/starlette/background.py +0 -43
- prefect/_vendor/starlette/concurrency.py +0 -59
- prefect/_vendor/starlette/config.py +0 -151
- prefect/_vendor/starlette/convertors.py +0 -87
- prefect/_vendor/starlette/datastructures.py +0 -707
- prefect/_vendor/starlette/endpoints.py +0 -130
- prefect/_vendor/starlette/exceptions.py +0 -60
- prefect/_vendor/starlette/formparsers.py +0 -276
- prefect/_vendor/starlette/middleware/__init__.py +0 -17
- prefect/_vendor/starlette/middleware/authentication.py +0 -52
- prefect/_vendor/starlette/middleware/base.py +0 -220
- prefect/_vendor/starlette/middleware/cors.py +0 -176
- prefect/_vendor/starlette/middleware/errors.py +0 -265
- prefect/_vendor/starlette/middleware/exceptions.py +0 -74
- prefect/_vendor/starlette/middleware/gzip.py +0 -113
- prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
- prefect/_vendor/starlette/middleware/sessions.py +0 -82
- prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
- prefect/_vendor/starlette/middleware/wsgi.py +0 -147
- prefect/_vendor/starlette/requests.py +0 -328
- prefect/_vendor/starlette/responses.py +0 -347
- prefect/_vendor/starlette/routing.py +0 -933
- prefect/_vendor/starlette/schemas.py +0 -154
- prefect/_vendor/starlette/staticfiles.py +0 -248
- prefect/_vendor/starlette/status.py +0 -199
- prefect/_vendor/starlette/templating.py +0 -231
- prefect/_vendor/starlette/testclient.py +0 -804
- prefect/_vendor/starlette/types.py +0 -30
- prefect/_vendor/starlette/websockets.py +0 -193
- prefect/agent.py +0 -698
- prefect/deployments/deployments.py +0 -1042
- prefect/deprecated/__init__.py +0 -0
- prefect/deprecated/data_documents.py +0 -350
- prefect/deprecated/packaging/__init__.py +0 -12
- prefect/deprecated/packaging/base.py +0 -96
- prefect/deprecated/packaging/docker.py +0 -146
- prefect/deprecated/packaging/file.py +0 -92
- prefect/deprecated/packaging/orion.py +0 -80
- prefect/deprecated/packaging/serializers.py +0 -171
- prefect/events/instrument.py +0 -135
- prefect/infrastructure/base.py +0 -323
- prefect/infrastructure/container.py +0 -818
- prefect/infrastructure/kubernetes.py +0 -920
- prefect/infrastructure/process.py +0 -289
- prefect/new_task_engine.py +0 -423
- prefect/pydantic/__init__.py +0 -76
- prefect/pydantic/main.py +0 -39
- prefect/software/__init__.py +0 -2
- prefect/software/base.py +0 -50
- prefect/software/conda.py +0 -199
- prefect/software/pip.py +0 -122
- prefect/software/python.py +0 -52
- prefect/workers/block.py +0 -218
- prefect_client-2.19.3.dist-info/RECORD +0 -292
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/LICENSE +0 -0
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/WHEEL +0 -0
- {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/top_level.txt +0 -0
prefect/task_engine.py
CHANGED
@@ -1,76 +1,715 @@
-
+import inspect
+import logging
+import time
+from contextlib import ExitStack, contextmanager
+from dataclasses import dataclass, field
 from typing import (
     Any,
+    Callable,
+    Coroutine,
     Dict,
+    Generator,
+    Generic,
     Iterable,
+    Literal,
     Optional,
+    Set,
+    TypeVar,
+    Union,
 )
+from uuid import UUID

-import
-from typing_extensions import
-
-from prefect
-from prefect.
-from prefect.client.
-from prefect.
-from prefect.
-
-
-
+import pendulum
+from typing_extensions import ParamSpec
+
+from prefect import Task
+from prefect._internal.concurrency.api import create_call, from_sync
+from prefect.client.orchestration import SyncPrefectClient
+from prefect.client.schemas import TaskRun
+from prefect.client.schemas.objects import State, TaskRunInput
+from prefect.context import (
+    ClientContext,
+    FlowRunContext,
+    TaskRunContext,
+    hydrated_context,
+)
+from prefect.events.schemas.events import Event
+from prefect.exceptions import (
+    Abort,
+    Pause,
+    PrefectException,
+    UpstreamTaskError,
 )
 from prefect.futures import PrefectFuture
-from prefect.
-from prefect.
-from prefect.
-from prefect.
+from prefect.logging.handlers import APILogHandler
+from prefect.logging.loggers import get_logger, patch_print, task_run_logger
+from prefect.records.result_store import ResultFactoryStore
+from prefect.results import ResultFactory, _format_user_supplied_storage_key
+from prefect.settings import (
+    PREFECT_DEBUG_MODE,
+    PREFECT_TASKS_REFRESH_CACHE,
+)
+from prefect.states import (
+    Failed,
+    Paused,
+    Pending,
+    Retrying,
+    Running,
+    StateDetails,
+    exception_to_crashed_state,
+    exception_to_failed_state,
+    return_value_to_state,
+)
+from prefect.transactions import Transaction, transaction
+from prefect.utilities.asyncutils import run_coro_as_sync
+from prefect.utilities.callables import parameters_to_args_kwargs
+from prefect.utilities.collections import visit_collection
+from prefect.utilities.engine import (
+    _get_hook_name,
+    emit_task_run_state_change_event,
+    propose_state_sync,
+    resolve_to_final_result,
+)
+from prefect.utilities.math import clamped_poisson_interval
+from prefect.utilities.timeout import timeout, timeout_async

-
+P = ParamSpec("P")
+R = TypeVar("R")


-@
-
-    task: Task,
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+@dataclass
+class TaskRunEngine(Generic[P, R]):
+    task: Union[Task[P, R], Task[P, Coroutine[Any, Any, R]]]
+    logger: logging.Logger = field(default_factory=lambda: get_logger("engine"))
+    parameters: Optional[Dict[str, Any]] = None
+    task_run: Optional[TaskRun] = None
+    retries: int = 0
+    wait_for: Optional[Iterable[PrefectFuture]] = None
+    context: Optional[Dict[str, Any]] = None
+    _initial_run_context: Optional[TaskRunContext] = None
+    _is_started: bool = False
+    _client: Optional[SyncPrefectClient] = None
+    _task_name_set: bool = False
+    _last_event: Optional[Event] = None
+
+    def __post_init__(self):
+        if self.parameters is None:
+            self.parameters = {}
+
+    @property
+    def client(self) -> SyncPrefectClient:
+        if not self._is_started or self._client is None:
+            raise RuntimeError("Engine has not started.")
+        return self._client
+
+    @property
+    def state(self) -> State:
+        if not self.task_run:
+            raise ValueError("Task run is not set")
+        return self.task_run.state
+
+    @property
+    def can_retry(self) -> bool:
+        retry_condition: Optional[
+            Callable[[Task[P, Coroutine[Any, Any, R]], TaskRun, State], bool]
+        ] = self.task.retry_condition_fn
+        if not self.task_run:
+            raise ValueError("Task run is not set")
+        try:
+            self.logger.debug(
+                f"Running `retry_condition_fn` check {retry_condition!r} for task"
+                f" {self.task.name!r}"
             )
-
-
-
-
+            return not retry_condition or retry_condition(
+                self.task, self.task_run, self.state
+            )
+        except Exception:
+            self.logger.error(
+                (
+                    "An error was encountered while running `retry_condition_fn` check"
+                    f" '{retry_condition!r}' for task {self.task.name!r}"
+                ),
+                exc_info=True,
+            )
+            return False
+
+    def get_hooks(self, state: State, as_async: bool = False) -> Iterable[Callable]:
+        task = self.task
+        task_run = self.task_run
+
+        if not task_run:
+            raise ValueError("Task run is not set")
+
+        hooks = None
+        if state.is_failed() and task.on_failure_hooks:
+            hooks = task.on_failure_hooks
+        elif state.is_completed() and task.on_completion_hooks:
+            hooks = task.on_completion_hooks
+
+        for hook in hooks or []:
+            hook_name = _get_hook_name(hook)
+
+            @contextmanager
+            def hook_context():
+                try:
+                    self.logger.info(
+                        f"Running hook {hook_name!r} in response to entering state"
+                        f" {state.name!r}"
+                    )
+                    yield
+                except Exception:
+                    self.logger.error(
+                        f"An error was encountered while running hook {hook_name!r}",
+                        exc_info=True,
+                    )
+                else:
+                    self.logger.info(
+                        f"Hook {hook_name!r} finished running successfully"
+                    )
+
+            if as_async:
+
+                async def _hook_fn():
+                    with hook_context():
+                        result = hook(task, task_run, state)
+                        if inspect.isawaitable(result):
+                            await result
+
             else:
-
-
+
+                def _hook_fn():
+                    with hook_context():
+                        result = hook(task, task_run, state)
+                        if inspect.isawaitable(result):
+                            run_coro_as_sync(result)
+
+            yield _hook_fn
+
+    def compute_transaction_key(self) -> str:
+        if self.task.result_storage_key is not None:
+            key = _format_user_supplied_storage_key(self.task.result_storage_key)
+        else:
+            key = str(self.task_run.id)
+        return key
+
+    def _compute_state_details(
+        self, include_cache_expiration: bool = False
+    ) -> StateDetails:
+        task_run_context = TaskRunContext.get()
+        ## setup cache metadata
+        cache_key = (
+            self.task.cache_key_fn(
+                task_run_context,
+                self.parameters or {},
+            )
+            if self.task.cache_key_fn
+            else None
+        )
+        # Ignore the cached results for a cache key, default = false
+        # Setting on task level overrules the Prefect setting (env var)
+        refresh_cache = (
+            self.task.refresh_cache
+            if self.task.refresh_cache is not None
+            else PREFECT_TASKS_REFRESH_CACHE.value()
+        )
+
+        if include_cache_expiration:
+            cache_expiration = (
+                (pendulum.now("utc") + self.task.cache_expiration)
+                if self.task.cache_expiration
+                else None
+            )
+        else:
+            cache_expiration = None
+
+        return StateDetails(
+            cache_key=cache_key,
+            refresh_cache=refresh_cache,
+            cache_expiration=cache_expiration,
+        )
+
+    def _resolve_parameters(self):
+        if not self.parameters:
+            return {}
+
+        resolved_parameters = {}
+        for parameter, value in self.parameters.items():
+            try:
+                resolved_parameters[parameter] = visit_collection(
+                    value,
+                    visit_fn=resolve_to_final_result,
+                    return_data=True,
+                    max_depth=-1,
+                    remove_annotations=True,
+                    context={},
+                )
+            except UpstreamTaskError:
+                raise
+            except Exception as exc:
+                raise PrefectException(
+                    f"Failed to resolve inputs in parameter {parameter!r}. If your"
+                    " parameter type is not supported, consider using the `quote`"
+                    " annotation to skip resolution of inputs."
+                ) from exc
+
+        self.parameters = resolved_parameters
+
+    def _wait_for_dependencies(self):
+        if not self.wait_for:
+            return
+
+        visit_collection(
+            self.wait_for,
+            visit_fn=resolve_to_final_result,
+            return_data=False,
+            max_depth=-1,
+            remove_annotations=True,
+            context={},
+        )
+
+    def begin_run(self):
+        try:
+            self._resolve_parameters()
+            self._wait_for_dependencies()
+        except UpstreamTaskError as upstream_exc:
+            state = self.set_state(
+                Pending(
+                    name="NotReady",
+                    message=str(upstream_exc),
+                ),
+                # if orchestrating a run already in a pending state, force orchestration to
+                # update the state name
+                force=self.state.is_pending(),
+            )
+            return
+
+        state_details = self._compute_state_details()
+        new_state = Running(state_details=state_details)
+        state = self.set_state(new_state)
+
+        BACKOFF_MAX = 10
+        backoff_count = 0
+
+        # TODO: Could this listen for state change events instead of polling?
+        while state.is_pending() or state.is_paused():
+            if backoff_count < BACKOFF_MAX:
+                backoff_count += 1
+            interval = clamped_poisson_interval(
+                average_interval=backoff_count, clamping_factor=0.3
+            )
+            time.sleep(interval)
+            state = self.set_state(new_state)
+
+    def set_state(self, state: State, force: bool = False) -> State:
+        last_state = self.state
+        if not self.task_run:
+            raise ValueError("Task run is not set")
+        try:
+            new_state = propose_state_sync(
+                self.client, state, task_run_id=self.task_run.id, force=force
+            )
+        except Pause as exc:
+            # We shouldn't get a pause signal without a state, but if this happens,
+            # just use a Paused state to assume an in-process pause.
+            new_state = exc.state if exc.state else Paused()
+            if new_state.state_details.pause_reschedule:
+                # If we're being asked to pause and reschedule, we should exit the
+                # task and expect to be resumed later.
+                raise
+
+        # currently this is a hack to keep a reference to the state object
+        # that has an in-memory result attached to it; using the API state
+
+        # could result in losing that reference
+        self.task_run.state = new_state
+        # emit a state change event
+        self._last_event = emit_task_run_state_change_event(
+            task_run=self.task_run,
+            initial_state=last_state,
+            validated_state=self.task_run.state,
+            follows=self._last_event,
+        )
+        return new_state
+
+    def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
+        _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True)
+        # state.result is a `sync_compatible` function that may or may not return an awaitable
+        # depending on whether the parent frame is sync or not
+        if inspect.isawaitable(_result):
+            _result = run_coro_as_sync(_result)
+        return _result
+
+    def handle_success(self, result: R, transaction: Transaction) -> R:
+        result_factory = getattr(TaskRunContext.get(), "result_factory", None)
+        if result_factory is None:
+            raise ValueError("Result factory is not set")
+
+        # dont put this inside function, else the transaction could get serialized
+        key = transaction.key
+
+        def key_fn():
+            return key
+
+        result_factory.storage_key_fn = key_fn
+        terminal_state = run_coro_as_sync(
+            return_value_to_state(
+                result,
+                result_factory=result_factory,
+            )
+        )
+        transaction.stage(
+            terminal_state.data,
+            on_rollback_hooks=self.task.on_rollback_hooks,
+            on_commit_hooks=self.task.on_commit_hooks,
+        )
+        terminal_state.state_details = self._compute_state_details(
+            include_cache_expiration=True
+        )
+        self.set_state(terminal_state)
+        return result
+
+    def handle_retry(self, exc: Exception) -> bool:
+        """
+        If the task has retries left, and the retry condition is met, set the task to retrying.
+        - If the task has no retries left, or the retry condition is not met, return False.
+        - If the task has retries left, and the retry condition is met, return True.
+        """
+        if self.retries < self.task.retries and self.can_retry:
+            self.set_state(Retrying(), force=True)
+            self.retries = self.retries + 1
+            return True
+        return False
+
+    def handle_exception(self, exc: Exception) -> None:
+        # If the task fails, and we have retries left, set the task to retrying.
+        if not self.handle_retry(exc):
+            # If the task has no retries left, or the retry condition is not met, set the task to failed.
+            context = TaskRunContext.get()
+            state = run_coro_as_sync(
+                exception_to_failed_state(
+                    exc,
+                    message="Task run encountered an exception",
+                    result_factory=getattr(context, "result_factory", None),
                 )
+            )
+            self.set_state(state)
+
+    def handle_timeout(self, exc: TimeoutError) -> None:
+        if not self.handle_retry(exc):
+            message = (
+                f"Task run exceeded timeout of {self.task.timeout_seconds} seconds"
+            )
+            self.logger.error(message)
+            state = Failed(
+                data=exc,
+                message=message,
+                name="TimedOut",
+            )
+            self.set_state(state)
+
+    def handle_crash(self, exc: BaseException) -> None:
+        state = run_coro_as_sync(exception_to_crashed_state(exc))
+        self.logger.error(f"Crash detected! {state.message}")
+        self.logger.debug("Crash details:", exc_info=exc)
+        self.set_state(state, force=True)
+
+    @contextmanager
+    def enter_run_context(self, client: Optional[SyncPrefectClient] = None):
+        from prefect.utilities.engine import (
+            _resolve_custom_task_run_name,
+            should_log_prints,
+        )

-
-
-
+        if client is None:
+            client = self.client
+        if not self.task_run:
+            raise ValueError("Task run is not set")
+
+        self.task_run = client.read_task_run(self.task_run.id)
+        with ExitStack() as stack:
+            if log_prints := should_log_prints(self.task):
+                stack.enter_context(patch_print())
+            stack.enter_context(
+                TaskRunContext(
+                    task=self.task,
+                    log_prints=log_prints,
+                    task_run=self.task_run,
+                    parameters=self.parameters,
+                    result_factory=run_coro_as_sync(
+                        ResultFactory.from_autonomous_task(self.task)
+                    ),  # type: ignore
                     client=client,
                 )
-
+            )
+            # set the logger to the task run logger
+            self.logger = task_run_logger(task_run=self.task_run, task=self.task)  # type: ignore
+
+            # update the task run name if necessary
+            if not self._task_name_set and self.task.task_run_name:
+                task_run_name = _resolve_custom_task_run_name(
+                    task=self.task, parameters=self.parameters
+                )
+                self.client.set_task_run_name(
+                    task_run_id=self.task_run.id, name=task_run_name
+                )
+                self.logger.extra["task_run_name"] = task_run_name
+                self.logger.debug(
+                    f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
+                )
+                self.task_run.name = task_run_name
+                self._task_name_set = True
+            yield
+
+    @contextmanager
+    def start(
+        self,
+        task_run_id: Optional[UUID] = None,
+        dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    ) -> Generator["TaskRunEngine", Any, Any]:
+        """
+        Enters a client context and creates a task run if needed.
+        """
+        with hydrated_context(self.context):
+            with ClientContext.get_or_create() as client_ctx:
+                self._client = client_ctx.sync_client
+                self._is_started = True
+                try:
+                    if not self.task_run:
+                        self.task_run = run_coro_as_sync(
+                            self.task.create_run(
+                                id=task_run_id,
+                                parameters=self.parameters,
+                                flow_run_context=FlowRunContext.get(),
+                                parent_task_run_context=TaskRunContext.get(),
+                                wait_for=self.wait_for,
+                                extra_task_inputs=dependencies,
+                            )
+                        )
+                        self.logger.info(
+                            f"Created task run {self.task_run.name!r} for task {self.task.name!r}"
+                        )
+                        # Emit an event to capture that the task run was in the `PENDING` state.
+                        self._last_event = emit_task_run_state_change_event(
+                            task_run=self.task_run,
+                            initial_state=None,
+                            validated_state=self.task_run.state,
+                        )
+
+                    yield self
+                except Exception:
+                    # regular exceptions are caught and re-raised to the user
+                    raise
+                except (Pause, Abort):
+                    # Do not capture internal signals as crashes
+                    raise
+                except BaseException as exc:
+                    # BaseExceptions are caught and handled as crashes
+                    self.handle_crash(exc)
+                    raise
+                finally:
+                    # If debugging, use the more complete `repr` than the usual `str` description
+                    display_state = (
+                        repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
+                    )
+                    self.logger.log(
+                        level=(
+                            logging.INFO if self.state.is_completed() else logging.ERROR
+                        ),
+                        msg=f"Finished in state {display_state}",
+                    )
+
+                    # flush all logs if this is not a "top" level run
+                    if not (FlowRunContext.get() or TaskRunContext.get()):
+                        from_sync.call_soon_in_loop_thread(
+                            create_call(APILogHandler.aflush)
+                        )
+
+                    self._is_started = False
+                    self._client = None
+
+    def is_running(self) -> bool:
+        if getattr(self, "task_run", None) is None:
+            return False
+        return getattr(self, "task_run").state.is_running()
+
+
+def run_task_sync(
+    task: Task[P, R],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> Union[R, State, None]:
+    engine = TaskRunEngine[P, R](
+        task=task,
+        parameters=parameters,
+        task_run=task_run,
+        wait_for=wait_for,
+        context=context,
+    )
+    # This is a context manager that keeps track of the run of the task run.
+    with engine.start(task_run_id=task_run_id, dependencies=dependencies) as run:
+        with run.enter_run_context():
+            run.begin_run()
+            while run.is_running():
+                # enter run context on each loop iteration to ensure the context
+                # contains the latest task run metadata
+                with run.enter_run_context():
+                    try:
+                        # This is where the task is actually run.
+                        with timeout(seconds=run.task.timeout_seconds):
+                            call_args, call_kwargs = parameters_to_args_kwargs(
+                                task.fn, run.parameters or {}
+                            )
+                            run.logger.debug(
+                                f"Executing task {task.name!r} for task run {run.task_run.name!r}..."
+                            )
+                            result_factory = getattr(
+                                TaskRunContext.get(), "result_factory", None
+                            )
+                            with transaction(
+                                key=run.compute_transaction_key(),
+                                store=ResultFactoryStore(result_factory=result_factory),
+                            ) as txn:
+                                if txn.is_committed():
+                                    result = txn.read()
+                                else:
+                                    result = task.fn(*call_args, **call_kwargs)  # type: ignore
+
+                        # If the task run is successful, finalize it.
+                        # do this within the transaction lifecycle
+                        # in order to get the proper result serialization
+                        run.handle_success(result, transaction=txn)
+
+                    except TimeoutError as exc:
+                        run.handle_timeout(exc)
+                    except Exception as exc:
+                        run.handle_exception(exc)
+
+            if run.state.is_final():
+                for hook in run.get_hooks(run.state):
+                    hook()
+
+            if return_type == "state":
+                return run.state
+            return run.result()
+
+
+async def run_task_async(
+    task: Task[P, Coroutine[Any, Any, R]],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> Union[R, State, None]:
+    """
+    Runs a task against the API.
+
+    We will most likely want to use this logic as a wrapper and return a coroutine for type inference.
+    """
+
+    engine = TaskRunEngine[P, R](
+        task=task,
+        parameters=parameters,
+        task_run=task_run,
+        wait_for=wait_for,
+        context=context,
+    )
+    # This is a context manager that keeps track of the run of the task run.
+    with engine.start(task_run_id=task_run_id, dependencies=dependencies) as run:
+        with run.enter_run_context():
+            run.begin_run()
+
+            while run.is_running():
+                # enter run context on each loop iteration to ensure the context
+                # contains the latest task run metadata
+                with run.enter_run_context():
+                    try:
+                        # This is where the task is actually run.
+                        with timeout_async(seconds=run.task.timeout_seconds):
+                            call_args, call_kwargs = parameters_to_args_kwargs(
+                                task.fn, run.parameters or {}
+                            )
+                            run.logger.debug(
+                                f"Executing task {task.name!r} for task run {run.task_run.name!r}..."
+                            )
+                            result_factory = getattr(
+                                TaskRunContext.get(), "result_factory", None
+                            )
+                            with transaction(
+                                key=run.compute_transaction_key(),
+                                store=ResultFactoryStore(result_factory=result_factory),
+                            ) as txn:
+                                if txn.is_committed():
+                                    result = txn.read()
+                                else:
+                                    result = await task.fn(*call_args, **call_kwargs)  # type: ignore
+
+                        # If the task run is successful, finalize it.
+                        # do this within the transaction lifecycle
+                        # in order to get the proper result serialization
+                        run.handle_success(result, transaction=txn)
+
+                    except TimeoutError as exc:
+                        run.handle_timeout(exc)
+                    except Exception as exc:
+                        run.handle_exception(exc)
+
+            if run.state.is_final():
+                for hook in run.get_hooks(run.state, as_async=True):
+                    await hook()
+
+            if return_type == "state":
+                return run.state
+            return run.result()
+
+
+def run_task(
+    task: Task[P, Union[R, Coroutine[Any, Any, R]]],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> Union[R, State, None, Coroutine[Any, Any, Union[R, State, None]]]:
+    """
+    Runs the provided task.
+
+    Args:
+        task: The task to run
+        task_run_id: The ID of the task run; if not provided, a new task run
+            will be created
+        task_run: The task run object; if not provided, a new task run
+            will be created
+        parameters: The parameters to pass to the task
+        wait_for: A list of futures to wait for before running the task
+        return_type: The return type to return; either "state" or "result"
+        dependencies: A dictionary of task run inputs to use for dependency tracking
+        context: A dictionary containing the context to use for the task run; only
+            required if the task is running on in a remote environment
+
+    Returns:
+        The result of the task run
+    """
+    kwargs = dict(
+        task=task,
+        task_run_id=task_run_id,
+        task_run=task_run,
+        parameters=parameters,
+        wait_for=wait_for,
+        return_type=return_type,
+        dependencies=dependencies,
+        context=context,
+    )
+    if task.isasync:
+        return run_task_async(**kwargs)
+    else:
+        return run_task_sync(**kwargs)
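The rewritten `prefect/task_engine.py` is built around the `TaskRunEngine` dataclass plus three module-level entry points: `run_task_sync`, `run_task_async`, and `run_task`, which dispatches between the two based on `task.isasync`. A minimal sketch of the synchronous path is shown below; it assumes a Prefect 3.0.0rc1 environment with a reachable API (the engine opens a client and creates a task run), and the `add` task and its parameters are illustrative rather than part of the diff.

```python
from prefect import task
from prefect.task_engine import run_task


@task
def add(x: int, y: int) -> int:
    # hypothetical task used only to exercise the new engine entry point
    return x + y


# `add.isasync` is False, so run_task dispatches to run_task_sync.
# With the default return_type="result" the task's return value comes back;
# return_type="state" would return the final State object instead.
print(run_task(add, parameters={"x": 1, "y": 2}))  # expected: 3
print(run_task(add, parameters={"x": 1, "y": 2}, return_type="state"))
```

For an `async def` task, `run_task` returns the `run_task_async` coroutine, which the caller must await.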