prefect-client 2.19.2__py3-none-any.whl → 3.0.0rc1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- prefect/__init__.py +8 -56
- prefect/_internal/compatibility/deprecated.py +6 -115
- prefect/_internal/compatibility/experimental.py +4 -79
- prefect/_internal/concurrency/api.py +0 -34
- prefect/_internal/concurrency/calls.py +0 -6
- prefect/_internal/concurrency/cancellation.py +0 -3
- prefect/_internal/concurrency/event_loop.py +0 -20
- prefect/_internal/concurrency/inspection.py +3 -3
- prefect/_internal/concurrency/threads.py +35 -0
- prefect/_internal/concurrency/waiters.py +0 -28
- prefect/_internal/pydantic/__init__.py +0 -45
- prefect/_internal/pydantic/v1_schema.py +21 -22
- prefect/_internal/pydantic/v2_schema.py +0 -2
- prefect/_internal/pydantic/v2_validated_func.py +18 -23
- prefect/_internal/schemas/bases.py +44 -177
- prefect/_internal/schemas/fields.py +1 -43
- prefect/_internal/schemas/validators.py +60 -158
- prefect/artifacts.py +161 -14
- prefect/automations.py +39 -4
- prefect/blocks/abstract.py +1 -1
- prefect/blocks/core.py +268 -148
- prefect/blocks/fields.py +2 -57
- prefect/blocks/kubernetes.py +8 -12
- prefect/blocks/notifications.py +40 -20
- prefect/blocks/system.py +22 -11
- prefect/blocks/webhook.py +2 -9
- prefect/client/base.py +4 -4
- prefect/client/cloud.py +8 -13
- prefect/client/orchestration.py +347 -341
- prefect/client/schemas/actions.py +92 -86
- prefect/client/schemas/filters.py +20 -40
- prefect/client/schemas/objects.py +151 -145
- prefect/client/schemas/responses.py +16 -24
- prefect/client/schemas/schedules.py +47 -35
- prefect/client/subscriptions.py +2 -2
- prefect/client/utilities.py +5 -2
- prefect/concurrency/asyncio.py +3 -1
- prefect/concurrency/events.py +1 -1
- prefect/concurrency/services.py +6 -3
- prefect/context.py +195 -27
- prefect/deployments/__init__.py +5 -6
- prefect/deployments/base.py +7 -5
- prefect/deployments/flow_runs.py +185 -0
- prefect/deployments/runner.py +50 -45
- prefect/deployments/schedules.py +28 -23
- prefect/deployments/steps/__init__.py +0 -1
- prefect/deployments/steps/core.py +1 -0
- prefect/deployments/steps/pull.py +7 -21
- prefect/engine.py +12 -2422
- prefect/events/actions.py +17 -23
- prefect/events/cli/automations.py +19 -6
- prefect/events/clients.py +14 -37
- prefect/events/filters.py +14 -18
- prefect/events/related.py +2 -2
- prefect/events/schemas/__init__.py +0 -5
- prefect/events/schemas/automations.py +55 -46
- prefect/events/schemas/deployment_triggers.py +7 -197
- prefect/events/schemas/events.py +34 -65
- prefect/events/schemas/labelling.py +10 -14
- prefect/events/utilities.py +2 -3
- prefect/events/worker.py +2 -3
- prefect/filesystems.py +6 -517
- prefect/{new_flow_engine.py → flow_engine.py} +313 -72
- prefect/flow_runs.py +377 -5
- prefect/flows.py +307 -166
- prefect/futures.py +186 -345
- prefect/infrastructure/__init__.py +0 -27
- prefect/infrastructure/provisioners/__init__.py +5 -3
- prefect/infrastructure/provisioners/cloud_run.py +11 -6
- prefect/infrastructure/provisioners/container_instance.py +11 -7
- prefect/infrastructure/provisioners/ecs.py +6 -4
- prefect/infrastructure/provisioners/modal.py +8 -5
- prefect/input/actions.py +2 -4
- prefect/input/run_input.py +5 -7
- prefect/logging/formatters.py +0 -2
- prefect/logging/handlers.py +3 -11
- prefect/logging/loggers.py +2 -2
- prefect/manifests.py +2 -1
- prefect/records/__init__.py +1 -0
- prefect/records/result_store.py +42 -0
- prefect/records/store.py +9 -0
- prefect/results.py +43 -39
- prefect/runner/runner.py +19 -15
- prefect/runner/server.py +6 -10
- prefect/runner/storage.py +3 -8
- prefect/runner/submit.py +2 -2
- prefect/runner/utils.py +2 -2
- prefect/serializers.py +24 -35
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
- prefect/settings.py +70 -133
- prefect/states.py +17 -47
- prefect/task_engine.py +697 -58
- prefect/task_runners.py +269 -301
- prefect/task_server.py +53 -34
- prefect/tasks.py +327 -337
- prefect/transactions.py +220 -0
- prefect/types/__init__.py +61 -82
- prefect/utilities/asyncutils.py +195 -136
- prefect/utilities/callables.py +311 -43
- prefect/utilities/collections.py +23 -38
- prefect/utilities/dispatch.py +11 -3
- prefect/utilities/dockerutils.py +4 -0
- prefect/utilities/engine.py +140 -20
- prefect/utilities/importtools.py +97 -27
- prefect/utilities/pydantic.py +128 -38
- prefect/utilities/schema_tools/hydration.py +5 -1
- prefect/utilities/templating.py +12 -2
- prefect/variables.py +78 -61
- prefect/workers/__init__.py +0 -1
- prefect/workers/base.py +15 -17
- prefect/workers/process.py +3 -8
- prefect/workers/server.py +2 -2
- {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/METADATA +22 -21
- prefect_client-3.0.0rc1.dist-info/RECORD +176 -0
- prefect/_internal/pydantic/_base_model.py +0 -51
- prefect/_internal/pydantic/_compat.py +0 -82
- prefect/_internal/pydantic/_flags.py +0 -20
- prefect/_internal/pydantic/_types.py +0 -8
- prefect/_internal/pydantic/utilities/__init__.py +0 -0
- prefect/_internal/pydantic/utilities/config_dict.py +0 -72
- prefect/_internal/pydantic/utilities/field_validator.py +0 -150
- prefect/_internal/pydantic/utilities/model_construct.py +0 -56
- prefect/_internal/pydantic/utilities/model_copy.py +0 -55
- prefect/_internal/pydantic/utilities/model_dump.py +0 -136
- prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
- prefect/_internal/pydantic/utilities/model_fields.py +0 -50
- prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
- prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
- prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
- prefect/_internal/pydantic/utilities/model_validate.py +0 -75
- prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
- prefect/_internal/pydantic/utilities/model_validator.py +0 -87
- prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
- prefect/_vendor/__init__.py +0 -0
- prefect/_vendor/fastapi/__init__.py +0 -25
- prefect/_vendor/fastapi/applications.py +0 -946
- prefect/_vendor/fastapi/background.py +0 -3
- prefect/_vendor/fastapi/concurrency.py +0 -44
- prefect/_vendor/fastapi/datastructures.py +0 -58
- prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
- prefect/_vendor/fastapi/dependencies/models.py +0 -64
- prefect/_vendor/fastapi/dependencies/utils.py +0 -877
- prefect/_vendor/fastapi/encoders.py +0 -177
- prefect/_vendor/fastapi/exception_handlers.py +0 -40
- prefect/_vendor/fastapi/exceptions.py +0 -46
- prefect/_vendor/fastapi/logger.py +0 -3
- prefect/_vendor/fastapi/middleware/__init__.py +0 -1
- prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
- prefect/_vendor/fastapi/middleware/cors.py +0 -3
- prefect/_vendor/fastapi/middleware/gzip.py +0 -3
- prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
- prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
- prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
- prefect/_vendor/fastapi/openapi/__init__.py +0 -0
- prefect/_vendor/fastapi/openapi/constants.py +0 -2
- prefect/_vendor/fastapi/openapi/docs.py +0 -203
- prefect/_vendor/fastapi/openapi/models.py +0 -480
- prefect/_vendor/fastapi/openapi/utils.py +0 -485
- prefect/_vendor/fastapi/param_functions.py +0 -340
- prefect/_vendor/fastapi/params.py +0 -453
- prefect/_vendor/fastapi/requests.py +0 -4
- prefect/_vendor/fastapi/responses.py +0 -40
- prefect/_vendor/fastapi/routing.py +0 -1331
- prefect/_vendor/fastapi/security/__init__.py +0 -15
- prefect/_vendor/fastapi/security/api_key.py +0 -98
- prefect/_vendor/fastapi/security/base.py +0 -6
- prefect/_vendor/fastapi/security/http.py +0 -172
- prefect/_vendor/fastapi/security/oauth2.py +0 -227
- prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
- prefect/_vendor/fastapi/security/utils.py +0 -10
- prefect/_vendor/fastapi/staticfiles.py +0 -1
- prefect/_vendor/fastapi/templating.py +0 -3
- prefect/_vendor/fastapi/testclient.py +0 -1
- prefect/_vendor/fastapi/types.py +0 -3
- prefect/_vendor/fastapi/utils.py +0 -235
- prefect/_vendor/fastapi/websockets.py +0 -7
- prefect/_vendor/starlette/__init__.py +0 -1
- prefect/_vendor/starlette/_compat.py +0 -28
- prefect/_vendor/starlette/_exception_handler.py +0 -80
- prefect/_vendor/starlette/_utils.py +0 -88
- prefect/_vendor/starlette/applications.py +0 -261
- prefect/_vendor/starlette/authentication.py +0 -159
- prefect/_vendor/starlette/background.py +0 -43
- prefect/_vendor/starlette/concurrency.py +0 -59
- prefect/_vendor/starlette/config.py +0 -151
- prefect/_vendor/starlette/convertors.py +0 -87
- prefect/_vendor/starlette/datastructures.py +0 -707
- prefect/_vendor/starlette/endpoints.py +0 -130
- prefect/_vendor/starlette/exceptions.py +0 -60
- prefect/_vendor/starlette/formparsers.py +0 -276
- prefect/_vendor/starlette/middleware/__init__.py +0 -17
- prefect/_vendor/starlette/middleware/authentication.py +0 -52
- prefect/_vendor/starlette/middleware/base.py +0 -220
- prefect/_vendor/starlette/middleware/cors.py +0 -176
- prefect/_vendor/starlette/middleware/errors.py +0 -265
- prefect/_vendor/starlette/middleware/exceptions.py +0 -74
- prefect/_vendor/starlette/middleware/gzip.py +0 -113
- prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
- prefect/_vendor/starlette/middleware/sessions.py +0 -82
- prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
- prefect/_vendor/starlette/middleware/wsgi.py +0 -147
- prefect/_vendor/starlette/requests.py +0 -328
- prefect/_vendor/starlette/responses.py +0 -347
- prefect/_vendor/starlette/routing.py +0 -933
- prefect/_vendor/starlette/schemas.py +0 -154
- prefect/_vendor/starlette/staticfiles.py +0 -248
- prefect/_vendor/starlette/status.py +0 -199
- prefect/_vendor/starlette/templating.py +0 -231
- prefect/_vendor/starlette/testclient.py +0 -804
- prefect/_vendor/starlette/types.py +0 -30
- prefect/_vendor/starlette/websockets.py +0 -193
- prefect/agent.py +0 -698
- prefect/deployments/deployments.py +0 -1042
- prefect/deprecated/__init__.py +0 -0
- prefect/deprecated/data_documents.py +0 -350
- prefect/deprecated/packaging/__init__.py +0 -12
- prefect/deprecated/packaging/base.py +0 -96
- prefect/deprecated/packaging/docker.py +0 -146
- prefect/deprecated/packaging/file.py +0 -92
- prefect/deprecated/packaging/orion.py +0 -80
- prefect/deprecated/packaging/serializers.py +0 -171
- prefect/events/instrument.py +0 -135
- prefect/infrastructure/base.py +0 -323
- prefect/infrastructure/container.py +0 -818
- prefect/infrastructure/kubernetes.py +0 -920
- prefect/infrastructure/process.py +0 -289
- prefect/new_task_engine.py +0 -423
- prefect/pydantic/__init__.py +0 -76
- prefect/pydantic/main.py +0 -39
- prefect/software/__init__.py +0 -2
- prefect/software/base.py +0 -50
- prefect/software/conda.py +0 -199
- prefect/software/pip.py +0 -122
- prefect/software/python.py +0 -52
- prefect/workers/block.py +0 -218
- prefect_client-2.19.2.dist-info/RECORD +0 -292
- {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/LICENSE +0 -0
- {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/WHEEL +0 -0
- {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/top_level.txt +0 -0
prefect/tasks.py
CHANGED
@@ -14,7 +14,6 @@ from typing import (
     Any,
     Awaitable,
     Callable,
-    Coroutine,
     Dict,
     Generic,
     Iterable,
@@ -22,17 +21,17 @@ from typing import (
     NoReturn,
     Optional,
     Set,
+    Tuple,
     TypeVar,
     Union,
     cast,
     overload,
 )
-from uuid import uuid4
+from uuid import UUID, uuid4

 from typing_extensions import Literal, ParamSpec

-from prefect.
-from prefect.client.orchestration import PrefectClient, SyncPrefectClient
+from prefect.client.orchestration import get_client
 from prefect.client.schemas import TaskRun
 from prefect.client.schemas.objects import TaskRunInput, TaskRunResult
 from prefect.context import (
@@ -40,40 +39,38 @@ from prefect.context import (
     PrefectObjectRegistry,
     TagsContext,
     TaskRunContext,
+    serialize_context,
 )
-from prefect.futures import PrefectFuture
-from prefect.logging.loggers import get_logger
-from prefect.results import ResultSerializer, ResultStorage
+from prefect.futures import PrefectDistributedFuture, PrefectFuture
+from prefect.logging.loggers import get_logger
+from prefect.results import ResultFactory, ResultSerializer, ResultStorage
 from prefect.settings import (
-    PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE,
-    PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING,
     PREFECT_TASK_DEFAULT_RETRIES,
     PREFECT_TASK_DEFAULT_RETRY_DELAY_SECONDS,
 )
-from prefect.states import Pending, State
-from prefect.task_runners import BaseTaskRunner
+from prefect.states import Pending, Scheduled, State
 from prefect.utilities.annotations import NotSet
-from prefect.utilities.asyncutils import
+from prefect.utilities.asyncutils import run_coro_as_sync
 from prefect.utilities.callables import (
+    expand_mapping_parameters,
     get_call_parameters,
     raise_for_reserved_arguments,
 )
 from prefect.utilities.hashing import hash_objects
 from prefect.utilities.importtools import to_qualified_name
-from prefect.utilities.visualization import (
-    VisualizationUnsupportedError,
-    get_task_viz_tracker,
-    track_viz_task,
-)

 if TYPE_CHECKING:
+    from prefect.client.orchestration import PrefectClient
     from prefect.context import TaskRunContext
-
+    from prefect.task_runners import BaseTaskRunner
+    from prefect.transactions import Transaction

 T = TypeVar("T")  # Generic type var for capturing the inner return type of async funcs
 R = TypeVar("R")  # The return type of the user's function
 P = ParamSpec("P")  # The parameters of the task

+NUM_CHARS_DYNAMIC_KEY = 8
+
 logger = get_logger("tasks")


@@ -189,6 +186,8 @@ class Task(Generic[P, R]):
             execution with matching cache key is used.
         on_failure: An optional list of callables to run when the task enters a failed state.
         on_completion: An optional list of callables to run when the task enters a completed state.
+        on_commit: An optional list of callables to run when the task's idempotency record is committed.
+        on_rollback: An optional list of callables to run when the task rolls back.
         retry_condition_fn: An optional callable run when a task run returns a Failed state. Should
             return `True` if the task should continue to its retry policy (e.g. `retries=3`), and `False` if the task
             should end as failed. Defaults to `None`, indicating the task should always continue
@@ -230,6 +229,8 @@ class Task(Generic[P, R]):
         refresh_cache: Optional[bool] = None,
         on_completion: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
         on_failure: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
+        on_rollback: Optional[List[Callable[["Transaction"], None]]] = None,
+        on_commit: Optional[List[Callable[["Transaction"], None]]] = None,
         retry_condition_fn: Optional[Callable[["Task", TaskRun, State], bool]] = None,
         viz_return_value: Optional[Any] = None,
     ):
@@ -238,8 +239,6 @@ class Task(Generic[P, R]):
         hook_names = ["on_completion", "on_failure"]
         for hooks, hook_name in zip(hook_categories, hook_names):
             if hooks is not None:
-                if not hooks:
-                    raise ValueError(f"Empty list passed for '{hook_name}'")
                 try:
                     hooks = list(hooks)
                 except TypeError:
@@ -247,8 +246,8 @@ class Task(Generic[P, R]):
                         f"Expected iterable for '{hook_name}'; got"
                         f" {type(hooks).__name__} instead. Please provide a list of"
                         f" hooks to '{hook_name}':\n\n"
-                        f"@
-                        "
+                        f"@task({hook_name}=[hook1, hook2])\ndef"
+                        " my_task():\n\tpass"
                     )

                 for hook in hooks:
@@ -257,8 +256,8 @@ class Task(Generic[P, R]):
                             f"Expected callables in '{hook_name}'; got"
                             f" {type(hook).__name__} instead. Please provide a list of"
                             f" hooks to '{hook_name}':\n\n"
-                            f"@
-                            "
+                            f"@task({hook_name}=[hook1, hook2])\ndef"
+                            " my_task():\n\tpass"
                         )

         if not callable(fn):
@@ -338,8 +337,10 @@ class Task(Generic[P, R]):
         self.result_storage_key = result_storage_key
         self.cache_result_in_memory = cache_result_in_memory
         self.timeout_seconds = float(timeout_seconds) if timeout_seconds else None
-        self.
-        self.
+        self.on_rollback_hooks = on_rollback or []
+        self.on_commit_hooks = on_commit or []
+        self.on_completion_hooks = on_completion or []
+        self.on_failure_hooks = on_failure or []

         # retry_condition_fn must be a callable or None. If it is neither, raise a TypeError
         if retry_condition_fn is not None and not (callable(retry_condition_fn)):
@@ -510,25 +511,50 @@ class Task(Generic[P, R]):
             refresh_cache=(
                 refresh_cache if refresh_cache is not NotSet else self.refresh_cache
             ),
-            on_completion=on_completion or self.
-            on_failure=on_failure or self.
+            on_completion=on_completion or self.on_completion_hooks,
+            on_failure=on_failure or self.on_failure_hooks,
             retry_condition_fn=retry_condition_fn or self.retry_condition_fn,
             viz_return_value=viz_return_value or self.viz_return_value,
         )

+    def on_completion(
+        self, fn: Callable[["Task", TaskRun, State], None]
+    ) -> Callable[["Task", TaskRun, State], None]:
+        self.on_completion_hooks.append(fn)
+        return fn
+
+    def on_failure(
+        self, fn: Callable[["Task", TaskRun, State], None]
+    ) -> Callable[["Task", TaskRun, State], None]:
+        self.on_failure_hooks.append(fn)
+        return fn
+
+    def on_commit(
+        self, fn: Callable[["Transaction"], None]
+    ) -> Callable[["Transaction"], None]:
+        self.on_commit_hooks.append(fn)
+        return fn
+
+    def on_rollback(
+        self, fn: Callable[["Transaction"], None]
+    ) -> Callable[["Transaction"], None]:
+        self.on_rollback_hooks.append(fn)
+        return fn
+
     async def create_run(
         self,
-        client: Optional[
-
+        client: Optional["PrefectClient"] = None,
+        id: Optional[UUID] = None,
+        parameters: Optional[Dict[str, Any]] = None,
         flow_run_context: Optional[FlowRunContext] = None,
         parent_task_run_context: Optional[TaskRunContext] = None,
         wait_for: Optional[Iterable[PrefectFuture]] = None,
         extra_task_inputs: Optional[Dict[str, Set[TaskRunInput]]] = None,
+        deferred: bool = False,
     ) -> TaskRun:
         from prefect.utilities.engine import (
             _dynamic_key_for_task_run,
-
-            collect_task_run_inputs,
+            collect_task_run_inputs_sync,
         )

         if flow_run_context is None:
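The hunk above adds decorator-style hook registration on `Task` (`on_completion`, `on_failure`, `on_commit`, `on_rollback`), with the transaction hooks receiving a `Transaction`. A minimal sketch of how these new methods could be used, assuming 3.0.0rc1 behavior matches the signatures shown; the task and hook functions below are hypothetical:

```python
from prefect import task
from prefect.transactions import Transaction  # new module in 3.0.0rc1 per this diff


@task
def load_data() -> int:
    return 42


@load_data.on_rollback
def undo(txn: Transaction) -> None:
    # runs if the task's transaction rolls back
    print(f"rolling back {txn}")


@load_data.on_commit
def confirm(txn: Transaction) -> None:
    # runs when the task's idempotency record is committed
    print(f"committed {txn}")
```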
@@ -537,76 +563,96 @@ class Task(Generic[P, R]):
             parent_task_run_context = TaskRunContext.get()
         if parameters is None:
             parameters = {}
+        if client is None:
+            client = get_client()

-
-            task_run_name = _resolve_custom_task_run_name(self, parameters)
-        except TypeError:
-            task_run_name = None
-
-        if flow_run_context:
-            dynamic_key = _dynamic_key_for_task_run(context=flow_run_context, task=self)
-        else:
-            dynamic_key = uuid4().hex
-
-        # collect task inputs
-        task_inputs = {
-            k: await collect_task_run_inputs(v) for k, v in parameters.items()
-        }
-
-        # check if this task has a parent task run based on running in another
-        # task run's existing context. A task run is only considered a parent if
-        # it is in the same flow run (because otherwise presumably the child is
-        # in a subflow, so the subflow serves as the parent) or if there is no
-        # flow run
-        if parent_task_run_context:
-            # there is no flow run
+        async with client:
             if not flow_run_context:
-
-
-
-
-
-
-
-                == flow_run_context.flow_run.id
-            ):
-                task_inputs["__parents__"] = [
-                    TaskRunResult(id=parent_task_run_context.task_run.id)
-                ]
-
-        if wait_for:
-            task_inputs["wait_for"] = await collect_task_run_inputs(wait_for)
-
-        # Join extra task inputs
-        for k, extras in (extra_task_inputs or {}).items():
-            task_inputs[k] = task_inputs[k].union(extras)
-
-        # create the task run
-        task_run = client.create_task_run(
-            task=self,
-            name=task_run_name,
-            flow_run_id=(
-                getattr(flow_run_context.flow_run, "id", None)
-                if flow_run_context and flow_run_context.flow_run
-                else None
-            ),
-            dynamic_key=str(dynamic_key),
-            state=Pending(),
-            task_inputs=task_inputs,
-            extra_tags=TagsContext.get().current_tags,
-        )
-        # the new engine uses sync clients but old engines use async clients
-        if inspect.isawaitable(task_run):
-            task_run = await task_run
+                dynamic_key = f"{self.task_key}-{str(uuid4().hex)}"
+                task_run_name = f"{self.name}-{dynamic_key[:NUM_CHARS_DYNAMIC_KEY]}"
+            else:
+                dynamic_key = _dynamic_key_for_task_run(
+                    context=flow_run_context, task=self
+                )
+                task_run_name = f"{self.name}-{dynamic_key}"

-
-
-
+            if deferred:
+                state = Scheduled()
+                state.state_details.deferred = True
+            else:
+                state = Pending()
+
+            # store parameters for background tasks so that task servers
+            # can retrieve them at runtime
+            if deferred and (parameters or wait_for):
+                parameters_id = uuid4()
+                state.state_details.task_parameters_id = parameters_id
+
+                # TODO: Improve use of result storage for parameter storage / reference
+                self.persist_result = True
+
+                factory = await ResultFactory.from_autonomous_task(self, client=client)
+                context = serialize_context()
+                data: Dict[str, Any] = {"context": context}
+                if parameters:
+                    data["parameters"] = parameters
+                if wait_for:
+                    data["wait_for"] = wait_for
+                await factory.store_parameters(parameters_id, data)
+
+            # collect task inputs
+            task_inputs = {
+                k: collect_task_run_inputs_sync(v) for k, v in parameters.items()
+            }
+
+            # check if this task has a parent task run based on running in another
+            # task run's existing context. A task run is only considered a parent if
+            # it is in the same flow run (because otherwise presumably the child is
+            # in a subflow, so the subflow serves as the parent) or if there is no
+            # flow run
+            if parent_task_run_context:
+                # there is no flow run
+                if not flow_run_context:
+                    task_inputs["__parents__"] = [
+                        TaskRunResult(id=parent_task_run_context.task_run.id)
+                    ]
+                # there is a flow run and the task run is in the same flow run
+                elif (
+                    flow_run_context
+                    and parent_task_run_context.task_run.flow_run_id
+                    == getattr(flow_run_context.flow_run, "id", None)
+                ):
+                    task_inputs["__parents__"] = [
+                        TaskRunResult(id=parent_task_run_context.task_run.id)
+                    ]
+
+            if wait_for:
+                task_inputs["wait_for"] = collect_task_run_inputs_sync(wait_for)
+
+            # Join extra task inputs
+            for k, extras in (extra_task_inputs or {}).items():
+                task_inputs[k] = task_inputs[k].union(extras)
+
+            # create the task run
+            task_run = client.create_task_run(
+                task=self,
+                name=task_run_name,
+                flow_run_id=(
+                    getattr(flow_run_context.flow_run, "id", None)
+                    if flow_run_context and flow_run_context.flow_run
+                    else None
+                ),
+                dynamic_key=str(dynamic_key),
+                id=id,
+                state=state,
+                task_inputs=task_inputs,
+                extra_tags=TagsContext.get().current_tags,
             )
-
-
+            # the new engine uses sync clients but old engines use async clients
+            if inspect.isawaitable(task_run):
+                task_run = await task_run

-
+            return task_run

     @overload
     def __call__(
@@ -646,9 +692,10 @@ class Task(Generic[P, R]):
         Run the task and return the result. If `return_state` is True returns
         the result is wrapped in a Prefect State which provides error handling.
         """
-        from prefect.
-
-
+        from prefect.utilities.visualization import (
+            get_task_viz_tracker,
+            track_viz_task,
+        )

         # Convert the call args/kwargs to a parameter dict
         parameters = get_call_parameters(self.fn, args, kwargs)
@@ -661,88 +708,13 @@ class Task(Generic[P, R]):
             self.isasync, self.name, parameters, self.viz_return_value
         )

-
-        from prefect.new_task_engine import run_task
+        from prefect.task_engine import run_task

-
-
-            parameters=parameters,
-            wait_for=wait_for,
-            return_type=return_type,
-        )
-
-        if (
-            PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
-            and not FlowRunContext.get()
-        ):
-            from prefect import get_client
-
-            return submit_autonomous_task_run_to_engine(
-                task=self,
-                task_run=None,
-                task_runner=SequentialTaskRunner(),
-                parameters=parameters,
-                return_type=return_type,
-                client=get_client(),
-            )
-
-        return enter_task_run_engine(
-            self,
+        return run_task(
+            task=self,
             parameters=parameters,
             wait_for=wait_for,
-            task_runner=SequentialTaskRunner(),
             return_type=return_type,
-            mapped=False,
-        )
-
-    @overload
-    def _run(
-        self: "Task[P, NoReturn]",
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> PrefectFuture[None, Sync]:
-        # `NoReturn` matches if a type can't be inferred for the function which stops a
-        # sync function from matching the `Coroutine` overload
-        ...
-
-    @overload
-    def _run(
-        self: "Task[P, Coroutine[Any, Any, T]]",
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> Awaitable[State[T]]:
-        ...
-
-    @overload
-    def _run(
-        self: "Task[P, T]",
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> State[T]:
-        ...
-
-    def _run(
-        self,
-        *args: P.args,
-        wait_for: Optional[Iterable[PrefectFuture]] = None,
-        **kwargs: P.kwargs,
-    ) -> Union[State, Awaitable[State]]:
-        """
-        Run the task and return the final state.
-        """
-        from prefect.engine import enter_task_run_engine
-        from prefect.task_runners import SequentialTaskRunner
-
-        # Convert the call args/kwargs to a parameter dict
-        parameters = get_call_parameters(self.fn, args, kwargs)
-
-        return enter_task_run_engine(
-            self,
-            parameters=parameters,
-            wait_for=wait_for,
-            return_type="state",
-            task_runner=SequentialTaskRunner(),
-            mapped=False,
         )

     @overload
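With `_run` gone, calling a task directly now routes straight to `run_task` in `prefect.task_engine`. A rough sketch of the calling behavior described by the docstring above (the task and flow names are illustrative, not from the package):

```python
from prefect import flow, task


@task
def add(x: int, y: int) -> int:
    return x + y


@flow
def my_flow():
    value = add(1, 2)                     # runs on the new task engine, returns 3
    state = add(1, 2, return_state=True)  # returns the result wrapped in a Prefect State
    return value, state
```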
@@ -750,50 +722,27 @@ class Task(Generic[P, R]):
         self: "Task[P, NoReturn]",
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> PrefectFuture
+    ) -> PrefectFuture:
         # `NoReturn` matches if a type can't be inferred for the function which stops a
         # sync function from matching the `Coroutine` overload
         ...

-    @overload
-    def submit(
-        self: "Task[P, Coroutine[Any, Any, T]]",
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> Awaitable[PrefectFuture[T, Async]]:
-        ...
-
     @overload
     def submit(
         self: "Task[P, T]",
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> PrefectFuture
+    ) -> PrefectFuture:
         ...

     @overload
     def submit(
         self: "Task[P, T]",
-        *args: P.args,
         return_state: Literal[True],
-
-    ) -> State[T]:
-        ...
-
-    @overload
-    def submit(
-        self: "Task[P, T]",
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> TaskRun:
-        ...
-
-    @overload
-    def submit(
-        self: "Task[P, Coroutine[Any, Any, T]]",
+        wait_for: Optional[Iterable[PrefectFuture]] = None,
         *args: P.args,
         **kwargs: P.kwargs,
-    ) ->
+    ) -> State[T]:
         ...

     def submit(
@@ -802,19 +751,15 @@ class Task(Generic[P, R]):
         return_state: bool = False,
         wait_for: Optional[Iterable[PrefectFuture]] = None,
         **kwargs: Any,
-    )
+    ):
         """
         Submit a run of the task to the engine.

         If writing an async task, this call must be awaited.

-        If called from within a flow function,
-
         Will create a new task run in the backing API and submit the task to the flow's
         task runner. This call only blocks execution while the task is being submitted,
-        once it is submitted, the flow function will continue executing.
-        that the `SequentialTaskRunner` does not implement parallel execution for sync tasks
-        and they are fully resolved on submission.
+        once it is submitted, the flow function will continue executing.

         Args:
             *args: Arguments to run the task with
@@ -894,97 +839,29 @@ class Task(Generic[P, R]):

         """

-        from prefect.
+        from prefect.utilities.visualization import (
+            VisualizationUnsupportedError,
+            get_task_viz_tracker,
+        )

         # Convert the call args/kwargs to a parameter dict
         parameters = get_call_parameters(self.fn, args, kwargs)
-        return_type = "state" if return_state else "future"
         flow_run_context = FlowRunContext.get()

+        if not flow_run_context:
+            raise ValueError("Task.submit() must be called within a flow")
+
         task_viz_tracker = get_task_viz_tracker()
         if task_viz_tracker:
             raise VisualizationUnsupportedError(
                 "`task.submit()` is not currently supported by `flow.visualize()`"
             )

-        if PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING and not flow_run_context:
-            create_autonomous_task_run_call = create_call(
-                create_autonomous_task_run, task=self, parameters=parameters
-            )
-            if self.isasync:
-                return from_async.wait_for_call_in_loop_thread(
-                    create_autonomous_task_run_call
-                )
-            else:
-                return from_sync.wait_for_call_in_loop_thread(
-                    create_autonomous_task_run_call
-                )
-        if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE and flow_run_context:
-            if self.isasync:
-                return self._submit_async(
-                    parameters=parameters,
-                    flow_run_context=flow_run_context,
-                    wait_for=wait_for,
-                    return_state=return_state,
-                )
-            else:
-                raise NotImplementedError(
-                    "Submitting sync tasks with the new engine has not be implemented yet."
-                )
-
-        else:
-            return enter_task_run_engine(
-                self,
-                parameters=parameters,
-                wait_for=wait_for,
-                return_type=return_type,
-                task_runner=None, # Use the flow's task runner
-                mapped=False,
-            )
-
-    async def _submit_async(
-        self,
-        parameters: Dict[str, Any],
-        flow_run_context: FlowRunContext,
-        wait_for: Optional[Iterable[PrefectFuture]],
-        return_state: bool,
-    ):
-        from prefect.new_task_engine import run_task_async
-
         task_runner = flow_run_context.task_runner
-
-        task_run = await self.create_run(
-            client=flow_run_context.client,
-            flow_run_context=flow_run_context,
-            parameters=parameters,
-            wait_for=wait_for,
-        )
-
-        future = PrefectFuture(
-            name=task_run.name,
-            key=uuid4(),
-            task_runner=task_runner,
-            asynchronous=(self.isasync and flow_run_context.flow.isasync),
-        )
-        future.task_run = task_run
-        flow_run_context.task_run_futures.append(future)
-        await task_runner.submit(
-            key=future.key,
-            call=partial(
-                run_task_async,
-                task=self,
-                task_run=task_run,
-                parameters=parameters,
-                wait_for=wait_for,
-                return_type="state",
-            ),
-        )
-        # TODO: I don't like this. Can we move responsibility for creating the future
-        # and setting this anyio.Event to the task runner?
-        future._submitted.set()
-
+        future = task_runner.submit(self, parameters, wait_for)
         if return_state:
-
+            future.wait()
+            return future.state
         else:
             return future

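Per the hunk above, `Task.submit` now raises outside a flow and hands the run to the flow's task runner, returning a future (or, with `return_state=True`, waiting and returning the final state). A hedged sketch of the resulting usage, with illustrative task and flow names:

```python
from prefect import flow, task


@task
def double(x: int) -> int:
    return 2 * x


@flow
def my_flow():
    future = double.submit(5)                    # submitted to the flow's task runner
    print(future.result())                       # block on the future for the value
    state = double.submit(6, return_state=True)  # wait and return the final State instead
    return state


# Calling double.submit(5) outside a flow raises ValueError in 3.0.0rc1 per this diff.
```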
@@ -993,32 +870,24 @@ class Task(Generic[P, R]):
         self: "Task[P, NoReturn]",
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> List[PrefectFuture
+    ) -> List[PrefectFuture]:
         # `NoReturn` matches if a type can't be inferred for the function which stops a
         # sync function from matching the `Coroutine` overload
         ...

-    @overload
-    def map(
-        self: "Task[P, Coroutine[Any, Any, T]]",
-        *args: P.args,
-        **kwargs: P.kwargs,
-    ) -> Awaitable[List[PrefectFuture[T, Async]]]:
-        ...
-
     @overload
     def map(
         self: "Task[P, T]",
         *args: P.args,
         **kwargs: P.kwargs,
-    ) -> List[PrefectFuture
+    ) -> List[PrefectFuture]:
         ...

     @overload
     def map(
         self: "Task[P, T]",
-        *args: P.args,
         return_state: Literal[True],
+        *args: P.args,
         **kwargs: P.kwargs,
     ) -> List[State[T]]:
         ...
@@ -1029,7 +898,7 @@ class Task(Generic[P, R]):
         return_state: bool = False,
         wait_for: Optional[Iterable[PrefectFuture]] = None,
         **kwargs: Any,
-    )
+    ):
         """
         Submit a mapped run of the task to a worker.

@@ -1044,9 +913,7 @@ class Task(Generic[P, R]):
         backing API and submit the task runs to the flow's task runner. This
         call blocks if given a future as input while the future is resolved. It
         also blocks while the tasks are being submitted, once they are
-        submitted, the flow function will continue executing.
-        that the `SequentialTaskRunner` does not implement parallel execution
-        for sync tasks and they are fully resolved on submission.
+        submitted, the flow function will continue executing.

         Args:
             *args: Iterable and static arguments to run the tasks with
@@ -1143,12 +1010,15 @@ class Task(Generic[P, R]):
         [[11, 21], [12, 22], [13, 23]]
         """

-        from prefect.
+        from prefect.utilities.visualization import (
+            VisualizationUnsupportedError,
+            get_task_viz_tracker,
+        )

         # Convert the call args/kwargs to a parameter dict; do not apply defaults
         # since they should not be mapped over
         parameters = get_call_parameters(self.fn, args, kwargs, apply_defaults=False)
-
+        flow_run_context = FlowRunContext.get()

         task_viz_tracker = get_task_viz_tracker()
         if task_viz_tracker:
@@ -1156,35 +1026,162 @@ class Task(Generic[P, R]):
                 "`task.map()` is not currently supported by `flow.visualize()`"
             )

-        if
-
-
-
-
-
-
+        if not flow_run_context:
+            # TODO: Should we split out background task mapping into a separate method
+            # like we do for the `submit`/`apply_async` split?
+            parameters_list = expand_mapping_parameters(self.fn, parameters)
+            # TODO: Make this non-blocking once we can return a list of futures
+            # instead of a list of task runs
+            return [
+                run_coro_as_sync(self.create_run(parameters=parameters, deferred=True))
+                for parameters in parameters_list
+            ]
+
+        from prefect.task_runners import TaskRunner
+
+        task_runner = flow_run_context.task_runner
+        assert isinstance(task_runner, TaskRunner)
+        futures = task_runner.map(self, parameters, wait_for)
+        if return_state:
+            states = []
+            for future in futures:
+                future.wait()
+                states.append(future.state)
+            return states
+        else:
+            return futures
+
+    def apply_async(
+        self,
+        args: Optional[Tuple[Any, ...]] = None,
+        kwargs: Optional[Dict[str, Any]] = None,
+        wait_for: Optional[Iterable[PrefectFuture]] = None,
+        dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    ) -> PrefectDistributedFuture:
+        """
+        Create a pending task run for a task server to execute.
+
+        Args:
+            args: Arguments to run the task with
+            kwargs: Keyword arguments to run the task with
+
+        Returns:
+            A PrefectDistributedFuture object representing the pending task run
+
+        Examples:
+
+            Define a task
+
+            >>> from prefect import task
+            >>> @task
+            >>> def my_task(name: str = "world"):
+            >>>     return f"hello {name}"
+
+            Create a pending task run for the task
+
+            >>> from prefect import flow
+            >>> @flow
+            >>> def my_flow():
+            >>>     my_task.apply_async(("marvin",))
+
+            Wait for a task to finish
+
+            >>> @flow
+            >>> def my_flow():
+            >>>     my_task.apply_async(("marvin",)).wait()
+
+
+            >>> @flow
+            >>> def my_flow():
+            >>>     print(my_task.apply_async(("marvin",)).result())
+            >>>
+            >>> my_flow()
+            hello marvin
+
+            TODO: Enforce ordering between tasks that do not exchange data
+            >>> @task
+            >>> def task_1():
+            >>>     pass
+            >>>
+            >>> @task
+            >>> def task_2():
+            >>>     pass
+            >>>
+            >>> @flow
+            >>> def my_flow():
+            >>>     x = task_1.apply_async()
+            >>>
+            >>>     # task 2 will wait for task_1 to complete
+            >>>     y = task_2.apply_async(wait_for=[x])
+
+        """
+        from prefect.utilities.visualization import (
+            VisualizationUnsupportedError,
+            get_task_viz_tracker,
+        )
+
+        task_viz_tracker = get_task_viz_tracker()
+        if task_viz_tracker:
+            raise VisualizationUnsupportedError(
+                "`task.apply_async()` is not currently supported by `flow.visualize()`"
+            )
+        args = args or ()
+        kwargs = kwargs or {}
+
+        # Convert the call args/kwargs to a parameter dict
+        parameters = get_call_parameters(self.fn, args, kwargs)
+
+        task_run = run_coro_as_sync(
+            self.create_run(
                 parameters=parameters,
-
+                deferred=True,
                 wait_for=wait_for,
-
-                task_runner=None,
-                autonomous=True,
+                extra_task_inputs=dependencies,
             )
-        if self.isasync:
-            return from_async.wait_for_call_in_loop_thread(map_call)
-        else:
-            return from_sync.wait_for_call_in_loop_thread(map_call)
-
-        return enter_task_run_engine(
-            self,
-            parameters=parameters,
-            wait_for=wait_for,
-            return_type=return_type,
-            task_runner=None,
-            mapped=True,
         )
+        return PrefectDistributedFuture(task_run_id=task_run.id)
+
+    def delay(self, *args: P.args, **kwargs: P.kwargs) -> PrefectDistributedFuture:
+        """
+        An alias for `apply_async` with simpler calling semantics.
+
+        Avoids having to use explicit "args" and "kwargs" arguments. Arguments
+        will pass through as-is to the task.
+
+        Examples:
+
+            Define a task
+
+            >>> from prefect import task
+            >>> @task
+            >>> def my_task(name: str = "world"):
+            >>>     return f"hello {name}"
+
+            Create a pending task run for the task
+
+            >>> from prefect import flow
+            >>> @flow
+            >>> def my_flow():
+            >>>     my_task.delay("marvin")
+
+            Wait for a task to finish
+
+            >>> @flow
+            >>> def my_flow():
+            >>>     my_task.delay("marvin").wait()

-
+            Use the result from a task in a flow
+
+            >>> @flow
+            >>> def my_flow():
+            >>>     print(my_task.delay("marvin").result())
+            >>>
+            >>> my_flow()
+            hello marvin
+        """
+        return self.apply_async(args=args, kwargs=kwargs)
+
+    def serve(self, task_runner: Optional["BaseTaskRunner"] = None) -> "Task":
         """Serve the task using the provided task runner. This method is used to
         establish a websocket connection with the Prefect server and listen for
         submitted task runs to execute.
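The new `apply_async`/`delay` methods create deferred (background) task runs and return a `PrefectDistributedFuture`; a task server started via `Task.serve` picks them up. A sketch combining the docstring examples above, assuming a task server is running the task elsewhere (names are illustrative):

```python
from prefect import task


@task
def say_hello(name: str = "world") -> str:
    return f"hello {name}"


# In a worker process, serve the task so deferred runs get executed:
#   say_hello.serve()

# Elsewhere, defer a run and wait on its result:
future = say_hello.delay("marvin")  # same as say_hello.apply_async(("marvin",))
print(future.result())              # blocks until the task server completes the run
```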
@@ -1201,13 +1198,6 @@ class Task(Generic[P, R]):

         >>> my_task.serve()
         """
-
-        if not PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING:
-            raise ValueError(
-                "Task's `serve` method is an experimental feature and must be enabled with "
-                "`prefect config set PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING=True`"
-            )
-
         from prefect.task_server import serve

         serve(self, task_runner=task_runner)