prefect-client 2.19.3__py3-none-any.whl → 3.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (239)
  1. prefect/__init__.py +8 -56
  2. prefect/_internal/compatibility/deprecated.py +6 -115
  3. prefect/_internal/compatibility/experimental.py +4 -79
  4. prefect/_internal/concurrency/api.py +0 -34
  5. prefect/_internal/concurrency/calls.py +0 -6
  6. prefect/_internal/concurrency/cancellation.py +0 -3
  7. prefect/_internal/concurrency/event_loop.py +0 -20
  8. prefect/_internal/concurrency/inspection.py +3 -3
  9. prefect/_internal/concurrency/threads.py +35 -0
  10. prefect/_internal/concurrency/waiters.py +0 -28
  11. prefect/_internal/pydantic/__init__.py +0 -45
  12. prefect/_internal/pydantic/v1_schema.py +21 -22
  13. prefect/_internal/pydantic/v2_schema.py +0 -2
  14. prefect/_internal/pydantic/v2_validated_func.py +18 -23
  15. prefect/_internal/schemas/bases.py +44 -177
  16. prefect/_internal/schemas/fields.py +1 -43
  17. prefect/_internal/schemas/validators.py +60 -158
  18. prefect/artifacts.py +161 -14
  19. prefect/automations.py +39 -4
  20. prefect/blocks/abstract.py +1 -1
  21. prefect/blocks/core.py +268 -148
  22. prefect/blocks/fields.py +2 -57
  23. prefect/blocks/kubernetes.py +8 -12
  24. prefect/blocks/notifications.py +40 -20
  25. prefect/blocks/system.py +22 -11
  26. prefect/blocks/webhook.py +2 -9
  27. prefect/client/base.py +4 -4
  28. prefect/client/cloud.py +8 -13
  29. prefect/client/orchestration.py +347 -341
  30. prefect/client/schemas/actions.py +92 -86
  31. prefect/client/schemas/filters.py +20 -40
  32. prefect/client/schemas/objects.py +147 -145
  33. prefect/client/schemas/responses.py +16 -24
  34. prefect/client/schemas/schedules.py +47 -35
  35. prefect/client/subscriptions.py +2 -2
  36. prefect/client/utilities.py +5 -2
  37. prefect/concurrency/asyncio.py +3 -1
  38. prefect/concurrency/events.py +1 -1
  39. prefect/concurrency/services.py +6 -3
  40. prefect/context.py +195 -27
  41. prefect/deployments/__init__.py +5 -6
  42. prefect/deployments/base.py +7 -5
  43. prefect/deployments/flow_runs.py +185 -0
  44. prefect/deployments/runner.py +50 -45
  45. prefect/deployments/schedules.py +28 -23
  46. prefect/deployments/steps/__init__.py +0 -1
  47. prefect/deployments/steps/core.py +1 -0
  48. prefect/deployments/steps/pull.py +7 -21
  49. prefect/engine.py +12 -2422
  50. prefect/events/actions.py +17 -23
  51. prefect/events/cli/automations.py +19 -6
  52. prefect/events/clients.py +14 -37
  53. prefect/events/filters.py +14 -18
  54. prefect/events/related.py +2 -2
  55. prefect/events/schemas/__init__.py +0 -5
  56. prefect/events/schemas/automations.py +55 -46
  57. prefect/events/schemas/deployment_triggers.py +7 -197
  58. prefect/events/schemas/events.py +34 -65
  59. prefect/events/schemas/labelling.py +10 -14
  60. prefect/events/utilities.py +2 -3
  61. prefect/events/worker.py +2 -3
  62. prefect/filesystems.py +6 -517
  63. prefect/{new_flow_engine.py → flow_engine.py} +313 -72
  64. prefect/flow_runs.py +377 -5
  65. prefect/flows.py +248 -165
  66. prefect/futures.py +186 -345
  67. prefect/infrastructure/__init__.py +0 -27
  68. prefect/infrastructure/provisioners/__init__.py +5 -3
  69. prefect/infrastructure/provisioners/cloud_run.py +11 -6
  70. prefect/infrastructure/provisioners/container_instance.py +11 -7
  71. prefect/infrastructure/provisioners/ecs.py +6 -4
  72. prefect/infrastructure/provisioners/modal.py +8 -5
  73. prefect/input/actions.py +2 -4
  74. prefect/input/run_input.py +5 -7
  75. prefect/logging/formatters.py +0 -2
  76. prefect/logging/handlers.py +3 -11
  77. prefect/logging/loggers.py +2 -2
  78. prefect/manifests.py +2 -1
  79. prefect/records/__init__.py +1 -0
  80. prefect/records/result_store.py +42 -0
  81. prefect/records/store.py +9 -0
  82. prefect/results.py +43 -39
  83. prefect/runner/runner.py +9 -9
  84. prefect/runner/server.py +6 -10
  85. prefect/runner/storage.py +3 -8
  86. prefect/runner/submit.py +2 -2
  87. prefect/runner/utils.py +2 -2
  88. prefect/serializers.py +24 -35
  89. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
  90. prefect/settings.py +70 -133
  91. prefect/states.py +17 -47
  92. prefect/task_engine.py +697 -58
  93. prefect/task_runners.py +269 -301
  94. prefect/task_server.py +53 -34
  95. prefect/tasks.py +327 -337
  96. prefect/transactions.py +220 -0
  97. prefect/types/__init__.py +61 -82
  98. prefect/utilities/asyncutils.py +195 -136
  99. prefect/utilities/callables.py +121 -41
  100. prefect/utilities/collections.py +23 -38
  101. prefect/utilities/dispatch.py +11 -3
  102. prefect/utilities/dockerutils.py +4 -0
  103. prefect/utilities/engine.py +140 -20
  104. prefect/utilities/importtools.py +26 -27
  105. prefect/utilities/pydantic.py +128 -38
  106. prefect/utilities/schema_tools/hydration.py +5 -1
  107. prefect/utilities/templating.py +12 -2
  108. prefect/variables.py +78 -61
  109. prefect/workers/__init__.py +0 -1
  110. prefect/workers/base.py +15 -17
  111. prefect/workers/process.py +3 -8
  112. prefect/workers/server.py +2 -2
  113. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/METADATA +22 -21
  114. prefect_client-3.0.0rc1.dist-info/RECORD +176 -0
  115. prefect/_internal/pydantic/_base_model.py +0 -51
  116. prefect/_internal/pydantic/_compat.py +0 -82
  117. prefect/_internal/pydantic/_flags.py +0 -20
  118. prefect/_internal/pydantic/_types.py +0 -8
  119. prefect/_internal/pydantic/utilities/__init__.py +0 -0
  120. prefect/_internal/pydantic/utilities/config_dict.py +0 -72
  121. prefect/_internal/pydantic/utilities/field_validator.py +0 -150
  122. prefect/_internal/pydantic/utilities/model_construct.py +0 -56
  123. prefect/_internal/pydantic/utilities/model_copy.py +0 -55
  124. prefect/_internal/pydantic/utilities/model_dump.py +0 -136
  125. prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
  126. prefect/_internal/pydantic/utilities/model_fields.py +0 -50
  127. prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
  128. prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
  129. prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
  130. prefect/_internal/pydantic/utilities/model_validate.py +0 -75
  131. prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
  132. prefect/_internal/pydantic/utilities/model_validator.py +0 -87
  133. prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
  134. prefect/_vendor/__init__.py +0 -0
  135. prefect/_vendor/fastapi/__init__.py +0 -25
  136. prefect/_vendor/fastapi/applications.py +0 -946
  137. prefect/_vendor/fastapi/background.py +0 -3
  138. prefect/_vendor/fastapi/concurrency.py +0 -44
  139. prefect/_vendor/fastapi/datastructures.py +0 -58
  140. prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
  141. prefect/_vendor/fastapi/dependencies/models.py +0 -64
  142. prefect/_vendor/fastapi/dependencies/utils.py +0 -877
  143. prefect/_vendor/fastapi/encoders.py +0 -177
  144. prefect/_vendor/fastapi/exception_handlers.py +0 -40
  145. prefect/_vendor/fastapi/exceptions.py +0 -46
  146. prefect/_vendor/fastapi/logger.py +0 -3
  147. prefect/_vendor/fastapi/middleware/__init__.py +0 -1
  148. prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
  149. prefect/_vendor/fastapi/middleware/cors.py +0 -3
  150. prefect/_vendor/fastapi/middleware/gzip.py +0 -3
  151. prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
  152. prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
  153. prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
  154. prefect/_vendor/fastapi/openapi/__init__.py +0 -0
  155. prefect/_vendor/fastapi/openapi/constants.py +0 -2
  156. prefect/_vendor/fastapi/openapi/docs.py +0 -203
  157. prefect/_vendor/fastapi/openapi/models.py +0 -480
  158. prefect/_vendor/fastapi/openapi/utils.py +0 -485
  159. prefect/_vendor/fastapi/param_functions.py +0 -340
  160. prefect/_vendor/fastapi/params.py +0 -453
  161. prefect/_vendor/fastapi/requests.py +0 -4
  162. prefect/_vendor/fastapi/responses.py +0 -40
  163. prefect/_vendor/fastapi/routing.py +0 -1331
  164. prefect/_vendor/fastapi/security/__init__.py +0 -15
  165. prefect/_vendor/fastapi/security/api_key.py +0 -98
  166. prefect/_vendor/fastapi/security/base.py +0 -6
  167. prefect/_vendor/fastapi/security/http.py +0 -172
  168. prefect/_vendor/fastapi/security/oauth2.py +0 -227
  169. prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
  170. prefect/_vendor/fastapi/security/utils.py +0 -10
  171. prefect/_vendor/fastapi/staticfiles.py +0 -1
  172. prefect/_vendor/fastapi/templating.py +0 -3
  173. prefect/_vendor/fastapi/testclient.py +0 -1
  174. prefect/_vendor/fastapi/types.py +0 -3
  175. prefect/_vendor/fastapi/utils.py +0 -235
  176. prefect/_vendor/fastapi/websockets.py +0 -7
  177. prefect/_vendor/starlette/__init__.py +0 -1
  178. prefect/_vendor/starlette/_compat.py +0 -28
  179. prefect/_vendor/starlette/_exception_handler.py +0 -80
  180. prefect/_vendor/starlette/_utils.py +0 -88
  181. prefect/_vendor/starlette/applications.py +0 -261
  182. prefect/_vendor/starlette/authentication.py +0 -159
  183. prefect/_vendor/starlette/background.py +0 -43
  184. prefect/_vendor/starlette/concurrency.py +0 -59
  185. prefect/_vendor/starlette/config.py +0 -151
  186. prefect/_vendor/starlette/convertors.py +0 -87
  187. prefect/_vendor/starlette/datastructures.py +0 -707
  188. prefect/_vendor/starlette/endpoints.py +0 -130
  189. prefect/_vendor/starlette/exceptions.py +0 -60
  190. prefect/_vendor/starlette/formparsers.py +0 -276
  191. prefect/_vendor/starlette/middleware/__init__.py +0 -17
  192. prefect/_vendor/starlette/middleware/authentication.py +0 -52
  193. prefect/_vendor/starlette/middleware/base.py +0 -220
  194. prefect/_vendor/starlette/middleware/cors.py +0 -176
  195. prefect/_vendor/starlette/middleware/errors.py +0 -265
  196. prefect/_vendor/starlette/middleware/exceptions.py +0 -74
  197. prefect/_vendor/starlette/middleware/gzip.py +0 -113
  198. prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
  199. prefect/_vendor/starlette/middleware/sessions.py +0 -82
  200. prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
  201. prefect/_vendor/starlette/middleware/wsgi.py +0 -147
  202. prefect/_vendor/starlette/requests.py +0 -328
  203. prefect/_vendor/starlette/responses.py +0 -347
  204. prefect/_vendor/starlette/routing.py +0 -933
  205. prefect/_vendor/starlette/schemas.py +0 -154
  206. prefect/_vendor/starlette/staticfiles.py +0 -248
  207. prefect/_vendor/starlette/status.py +0 -199
  208. prefect/_vendor/starlette/templating.py +0 -231
  209. prefect/_vendor/starlette/testclient.py +0 -804
  210. prefect/_vendor/starlette/types.py +0 -30
  211. prefect/_vendor/starlette/websockets.py +0 -193
  212. prefect/agent.py +0 -698
  213. prefect/deployments/deployments.py +0 -1042
  214. prefect/deprecated/__init__.py +0 -0
  215. prefect/deprecated/data_documents.py +0 -350
  216. prefect/deprecated/packaging/__init__.py +0 -12
  217. prefect/deprecated/packaging/base.py +0 -96
  218. prefect/deprecated/packaging/docker.py +0 -146
  219. prefect/deprecated/packaging/file.py +0 -92
  220. prefect/deprecated/packaging/orion.py +0 -80
  221. prefect/deprecated/packaging/serializers.py +0 -171
  222. prefect/events/instrument.py +0 -135
  223. prefect/infrastructure/base.py +0 -323
  224. prefect/infrastructure/container.py +0 -818
  225. prefect/infrastructure/kubernetes.py +0 -920
  226. prefect/infrastructure/process.py +0 -289
  227. prefect/new_task_engine.py +0 -423
  228. prefect/pydantic/__init__.py +0 -76
  229. prefect/pydantic/main.py +0 -39
  230. prefect/software/__init__.py +0 -2
  231. prefect/software/base.py +0 -50
  232. prefect/software/conda.py +0 -199
  233. prefect/software/pip.py +0 -122
  234. prefect/software/python.py +0 -52
  235. prefect/workers/block.py +0 -218
  236. prefect_client-2.19.3.dist-info/RECORD +0 -292
  237. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/LICENSE +0 -0
  238. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/WHEEL +0 -0
  239. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/top_level.txt +0 -0
prefect/tasks.py CHANGED
@@ -14,7 +14,6 @@ from typing import (
  Any,
  Awaitable,
  Callable,
- Coroutine,
  Dict,
  Generic,
  Iterable,
@@ -22,17 +21,17 @@ from typing import (
  NoReturn,
  Optional,
  Set,
+ Tuple,
  TypeVar,
  Union,
  cast,
  overload,
  )
- from uuid import uuid4
+ from uuid import UUID, uuid4

  from typing_extensions import Literal, ParamSpec

- from prefect._internal.concurrency.api import create_call, from_async, from_sync
- from prefect.client.orchestration import PrefectClient, SyncPrefectClient
+ from prefect.client.orchestration import get_client
  from prefect.client.schemas import TaskRun
  from prefect.client.schemas.objects import TaskRunInput, TaskRunResult
  from prefect.context import (
@@ -40,40 +39,38 @@ from prefect.context import (
  PrefectObjectRegistry,
  TagsContext,
  TaskRunContext,
+ serialize_context,
  )
- from prefect.futures import PrefectFuture
- from prefect.logging.loggers import get_logger, get_run_logger
- from prefect.results import ResultSerializer, ResultStorage
+ from prefect.futures import PrefectDistributedFuture, PrefectFuture
+ from prefect.logging.loggers import get_logger
+ from prefect.results import ResultFactory, ResultSerializer, ResultStorage
  from prefect.settings import (
- PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE,
- PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING,
  PREFECT_TASK_DEFAULT_RETRIES,
  PREFECT_TASK_DEFAULT_RETRY_DELAY_SECONDS,
  )
- from prefect.states import Pending, State
- from prefect.task_runners import BaseTaskRunner
+ from prefect.states import Pending, Scheduled, State
  from prefect.utilities.annotations import NotSet
- from prefect.utilities.asyncutils import Async, Sync
+ from prefect.utilities.asyncutils import run_coro_as_sync
  from prefect.utilities.callables import (
+ expand_mapping_parameters,
  get_call_parameters,
  raise_for_reserved_arguments,
  )
  from prefect.utilities.hashing import hash_objects
  from prefect.utilities.importtools import to_qualified_name
- from prefect.utilities.visualization import (
- VisualizationUnsupportedError,
- get_task_viz_tracker,
- track_viz_task,
- )

  if TYPE_CHECKING:
+ from prefect.client.orchestration import PrefectClient
  from prefect.context import TaskRunContext
-
+ from prefect.task_runners import BaseTaskRunner
+ from prefect.transactions import Transaction

  T = TypeVar("T") # Generic type var for capturing the inner return type of async funcs
  R = TypeVar("R") # The return type of the user's function
  P = ParamSpec("P") # The parameters of the task

+ NUM_CHARS_DYNAMIC_KEY = 8
+
  logger = get_logger("tasks")


@@ -189,6 +186,8 @@ class Task(Generic[P, R]):
  execution with matching cache key is used.
  on_failure: An optional list of callables to run when the task enters a failed state.
  on_completion: An optional list of callables to run when the task enters a completed state.
+ on_commit: An optional list of callables to run when the task's idempotency record is committed.
+ on_rollback: An optional list of callables to run when the task rolls back.
  retry_condition_fn: An optional callable run when a task run returns a Failed state. Should
  return `True` if the task should continue to its retry policy (e.g. `retries=3`), and `False` if the task
  should end as failed. Defaults to `None`, indicating the task should always continue
@@ -230,6 +229,8 @@ class Task(Generic[P, R]):
  refresh_cache: Optional[bool] = None,
  on_completion: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
  on_failure: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
+ on_rollback: Optional[List[Callable[["Transaction"], None]]] = None,
+ on_commit: Optional[List[Callable[["Transaction"], None]]] = None,
  retry_condition_fn: Optional[Callable[["Task", TaskRun, State], bool]] = None,
  viz_return_value: Optional[Any] = None,
  ):
@@ -238,8 +239,6 @@ class Task(Generic[P, R]):
  hook_names = ["on_completion", "on_failure"]
  for hooks, hook_name in zip(hook_categories, hook_names):
  if hooks is not None:
- if not hooks:
- raise ValueError(f"Empty list passed for '{hook_name}'")
  try:
  hooks = list(hooks)
  except TypeError:
@@ -247,8 +246,8 @@ class Task(Generic[P, R]):
  f"Expected iterable for '{hook_name}'; got"
  f" {type(hooks).__name__} instead. Please provide a list of"
  f" hooks to '{hook_name}':\n\n"
- f"@flow({hook_name}=[hook1, hook2])\ndef"
- " my_flow():\n\tpass"
+ f"@task({hook_name}=[hook1, hook2])\ndef"
+ " my_task():\n\tpass"
  )

  for hook in hooks:
@@ -257,8 +256,8 @@ class Task(Generic[P, R]):
  f"Expected callables in '{hook_name}'; got"
  f" {type(hook).__name__} instead. Please provide a list of"
  f" hooks to '{hook_name}':\n\n"
- f"@flow({hook_name}=[hook1, hook2])\ndef"
- " my_flow():\n\tpass"
+ f"@task({hook_name}=[hook1, hook2])\ndef"
+ " my_task():\n\tpass"
  )

  if not callable(fn):
@@ -338,8 +337,10 @@ class Task(Generic[P, R]):
  self.result_storage_key = result_storage_key
  self.cache_result_in_memory = cache_result_in_memory
  self.timeout_seconds = float(timeout_seconds) if timeout_seconds else None
- self.on_completion = on_completion
- self.on_failure = on_failure
+ self.on_rollback_hooks = on_rollback or []
+ self.on_commit_hooks = on_commit or []
+ self.on_completion_hooks = on_completion or []
+ self.on_failure_hooks = on_failure or []

  # retry_condition_fn must be a callable or None. If it is neither, raise a TypeError
  if retry_condition_fn is not None and not (callable(retry_condition_fn)):
@@ -510,25 +511,50 @@ class Task(Generic[P, R]):
  refresh_cache=(
  refresh_cache if refresh_cache is not NotSet else self.refresh_cache
  ),
- on_completion=on_completion or self.on_completion,
- on_failure=on_failure or self.on_failure,
+ on_completion=on_completion or self.on_completion_hooks,
+ on_failure=on_failure or self.on_failure_hooks,
  retry_condition_fn=retry_condition_fn or self.retry_condition_fn,
  viz_return_value=viz_return_value or self.viz_return_value,
  )

+ def on_completion(
+ self, fn: Callable[["Task", TaskRun, State], None]
+ ) -> Callable[["Task", TaskRun, State], None]:
+ self.on_completion_hooks.append(fn)
+ return fn
+
+ def on_failure(
+ self, fn: Callable[["Task", TaskRun, State], None]
+ ) -> Callable[["Task", TaskRun, State], None]:
+ self.on_failure_hooks.append(fn)
+ return fn
+
+ def on_commit(
+ self, fn: Callable[["Transaction"], None]
+ ) -> Callable[["Transaction"], None]:
+ self.on_commit_hooks.append(fn)
+ return fn
+
+ def on_rollback(
+ self, fn: Callable[["Transaction"], None]
+ ) -> Callable[["Transaction"], None]:
+ self.on_rollback_hooks.append(fn)
+ return fn
+
  async def create_run(
  self,
- client: Optional[Union[PrefectClient, SyncPrefectClient]],
- parameters: Dict[str, Any] = None,
+ client: Optional["PrefectClient"] = None,
+ id: Optional[UUID] = None,
+ parameters: Optional[Dict[str, Any]] = None,
  flow_run_context: Optional[FlowRunContext] = None,
  parent_task_run_context: Optional[TaskRunContext] = None,
  wait_for: Optional[Iterable[PrefectFuture]] = None,
  extra_task_inputs: Optional[Dict[str, Set[TaskRunInput]]] = None,
+ deferred: bool = False,
  ) -> TaskRun:
  from prefect.utilities.engine import (
  _dynamic_key_for_task_run,
- _resolve_custom_task_run_name,
- collect_task_run_inputs,
+ collect_task_run_inputs_sync,
  )

  if flow_run_context is None:
@@ -537,76 +563,96 @@ class Task(Generic[P, R]):
  parent_task_run_context = TaskRunContext.get()
  if parameters is None:
  parameters = {}
+ if client is None:
+ client = get_client()

- try:
- task_run_name = _resolve_custom_task_run_name(self, parameters)
- except TypeError:
- task_run_name = None
-
- if flow_run_context:
- dynamic_key = _dynamic_key_for_task_run(context=flow_run_context, task=self)
- else:
- dynamic_key = uuid4().hex
-
- # collect task inputs
- task_inputs = {
- k: await collect_task_run_inputs(v) for k, v in parameters.items()
- }
-
- # check if this task has a parent task run based on running in another
- # task run's existing context. A task run is only considered a parent if
- # it is in the same flow run (because otherwise presumably the child is
- # in a subflow, so the subflow serves as the parent) or if there is no
- # flow run
- if parent_task_run_context:
- # there is no flow run
+ async with client:
  if not flow_run_context:
- task_inputs["__parents__"] = [
- TaskRunResult(id=parent_task_run_context.task_run.id)
- ]
- # there is a flow run and the task run is in the same flow run
- elif (
- flow_run_context
- and parent_task_run_context.task_run.flow_run_id
- == flow_run_context.flow_run.id
- ):
- task_inputs["__parents__"] = [
- TaskRunResult(id=parent_task_run_context.task_run.id)
- ]
-
- if wait_for:
- task_inputs["wait_for"] = await collect_task_run_inputs(wait_for)
-
- # Join extra task inputs
- for k, extras in (extra_task_inputs or {}).items():
- task_inputs[k] = task_inputs[k].union(extras)
-
- # create the task run
- task_run = client.create_task_run(
- task=self,
- name=task_run_name,
- flow_run_id=(
- getattr(flow_run_context.flow_run, "id", None)
- if flow_run_context and flow_run_context.flow_run
- else None
- ),
- dynamic_key=str(dynamic_key),
- state=Pending(),
- task_inputs=task_inputs,
- extra_tags=TagsContext.get().current_tags,
- )
- # the new engine uses sync clients but old engines use async clients
- if inspect.isawaitable(task_run):
- task_run = await task_run
+ dynamic_key = f"{self.task_key}-{str(uuid4().hex)}"
+ task_run_name = f"{self.name}-{dynamic_key[:NUM_CHARS_DYNAMIC_KEY]}"
+ else:
+ dynamic_key = _dynamic_key_for_task_run(
+ context=flow_run_context, task=self
+ )
+ task_run_name = f"{self.name}-{dynamic_key}"

- if flow_run_context and flow_run_context.flow_run:
- get_run_logger(flow_run_context).debug(
- f"Created task run {task_run.name!r} for task {self.name!r}"
+ if deferred:
+ state = Scheduled()
+ state.state_details.deferred = True
+ else:
+ state = Pending()
+
+ # store parameters for background tasks so that task servers
+ # can retrieve them at runtime
+ if deferred and (parameters or wait_for):
+ parameters_id = uuid4()
+ state.state_details.task_parameters_id = parameters_id
+
+ # TODO: Improve use of result storage for parameter storage / reference
+ self.persist_result = True
+
+ factory = await ResultFactory.from_autonomous_task(self, client=client)
+ context = serialize_context()
+ data: Dict[str, Any] = {"context": context}
+ if parameters:
+ data["parameters"] = parameters
+ if wait_for:
+ data["wait_for"] = wait_for
+ await factory.store_parameters(parameters_id, data)
+
+ # collect task inputs
+ task_inputs = {
+ k: collect_task_run_inputs_sync(v) for k, v in parameters.items()
+ }
+
+ # check if this task has a parent task run based on running in another
+ # task run's existing context. A task run is only considered a parent if
+ # it is in the same flow run (because otherwise presumably the child is
+ # in a subflow, so the subflow serves as the parent) or if there is no
+ # flow run
+ if parent_task_run_context:
+ # there is no flow run
+ if not flow_run_context:
+ task_inputs["__parents__"] = [
+ TaskRunResult(id=parent_task_run_context.task_run.id)
+ ]
+ # there is a flow run and the task run is in the same flow run
+ elif (
+ flow_run_context
+ and parent_task_run_context.task_run.flow_run_id
+ == getattr(flow_run_context.flow_run, "id", None)
+ ):
+ task_inputs["__parents__"] = [
+ TaskRunResult(id=parent_task_run_context.task_run.id)
+ ]
+
+ if wait_for:
+ task_inputs["wait_for"] = collect_task_run_inputs_sync(wait_for)
+
+ # Join extra task inputs
+ for k, extras in (extra_task_inputs or {}).items():
+ task_inputs[k] = task_inputs[k].union(extras)
+
+ # create the task run
+ task_run = client.create_task_run(
+ task=self,
+ name=task_run_name,
+ flow_run_id=(
+ getattr(flow_run_context.flow_run, "id", None)
+ if flow_run_context and flow_run_context.flow_run
+ else None
+ ),
+ dynamic_key=str(dynamic_key),
+ id=id,
+ state=state,
+ task_inputs=task_inputs,
+ extra_tags=TagsContext.get().current_tags,
  )
- else:
- logger.debug(f"Created task run {task_run.name!r} for task {self.name!r}")
+ # the new engine uses sync clients but old engines use async clients
+ if inspect.isawaitable(task_run):
+ task_run = await task_run

- return task_run
+ return task_run

  @overload
  def __call__(
@@ -646,9 +692,10 @@ class Task(Generic[P, R]):
  Run the task and return the result. If `return_state` is True returns
  the result is wrapped in a Prefect State which provides error handling.
  """
- from prefect.engine import enter_task_run_engine
- from prefect.task_engine import submit_autonomous_task_run_to_engine
- from prefect.task_runners import SequentialTaskRunner
+ from prefect.utilities.visualization import (
+ get_task_viz_tracker,
+ track_viz_task,
+ )

  # Convert the call args/kwargs to a parameter dict
  parameters = get_call_parameters(self.fn, args, kwargs)
@@ -661,88 +708,13 @@ class Task(Generic[P, R]):
  self.isasync, self.name, parameters, self.viz_return_value
  )

- if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE.value():
- from prefect.new_task_engine import run_task
+ from prefect.task_engine import run_task

- return run_task(
- task=self,
- parameters=parameters,
- wait_for=wait_for,
- return_type=return_type,
- )
-
- if (
- PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
- and not FlowRunContext.get()
- ):
- from prefect import get_client
-
- return submit_autonomous_task_run_to_engine(
- task=self,
- task_run=None,
- task_runner=SequentialTaskRunner(),
- parameters=parameters,
- return_type=return_type,
- client=get_client(),
- )
-
- return enter_task_run_engine(
- self,
+ return run_task(
+ task=self,
  parameters=parameters,
  wait_for=wait_for,
- task_runner=SequentialTaskRunner(),
  return_type=return_type,
- mapped=False,
- )
-
- @overload
- def _run(
- self: "Task[P, NoReturn]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> PrefectFuture[None, Sync]:
- # `NoReturn` matches if a type can't be inferred for the function which stops a
- # sync function from matching the `Coroutine` overload
- ...
-
- @overload
- def _run(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> Awaitable[State[T]]:
- ...
-
- @overload
- def _run(
- self: "Task[P, T]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> State[T]:
- ...
-
- def _run(
- self,
- *args: P.args,
- wait_for: Optional[Iterable[PrefectFuture]] = None,
- **kwargs: P.kwargs,
- ) -> Union[State, Awaitable[State]]:
- """
- Run the task and return the final state.
- """
- from prefect.engine import enter_task_run_engine
- from prefect.task_runners import SequentialTaskRunner
-
- # Convert the call args/kwargs to a parameter dict
- parameters = get_call_parameters(self.fn, args, kwargs)
-
- return enter_task_run_engine(
- self,
- parameters=parameters,
- wait_for=wait_for,
- return_type="state",
- task_runner=SequentialTaskRunner(),
- mapped=False,
  )

  @overload
@@ -750,50 +722,27 @@ class Task(Generic[P, R]):
  self: "Task[P, NoReturn]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> PrefectFuture[None, Sync]:
+ ) -> PrefectFuture:
  # `NoReturn` matches if a type can't be inferred for the function which stops a
  # sync function from matching the `Coroutine` overload
  ...

- @overload
- def submit(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> Awaitable[PrefectFuture[T, Async]]:
- ...
-
  @overload
  def submit(
  self: "Task[P, T]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> PrefectFuture[T, Sync]:
+ ) -> PrefectFuture:
  ...

  @overload
  def submit(
  self: "Task[P, T]",
- *args: P.args,
  return_state: Literal[True],
- **kwargs: P.kwargs,
- ) -> State[T]:
- ...
-
- @overload
- def submit(
- self: "Task[P, T]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> TaskRun:
- ...
-
- @overload
- def submit(
- self: "Task[P, Coroutine[Any, Any, T]]",
+ wait_for: Optional[Iterable[PrefectFuture]] = None,
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> Awaitable[TaskRun]:
+ ) -> State[T]:
  ...

  def submit(
@@ -802,19 +751,15 @@ class Task(Generic[P, R]):
  return_state: bool = False,
  wait_for: Optional[Iterable[PrefectFuture]] = None,
  **kwargs: Any,
- ) -> Union[PrefectFuture, Awaitable[PrefectFuture], TaskRun, Awaitable[TaskRun]]:
+ ):
  """
  Submit a run of the task to the engine.

  If writing an async task, this call must be awaited.

- If called from within a flow function,
-
  Will create a new task run in the backing API and submit the task to the flow's
  task runner. This call only blocks execution while the task is being submitted,
- once it is submitted, the flow function will continue executing. However, note
- that the `SequentialTaskRunner` does not implement parallel execution for sync tasks
- and they are fully resolved on submission.
+ once it is submitted, the flow function will continue executing.

  Args:
  *args: Arguments to run the task with
@@ -894,97 +839,29 @@ class Task(Generic[P, R]):

  """

- from prefect.engine import create_autonomous_task_run, enter_task_run_engine
+ from prefect.utilities.visualization import (
+ VisualizationUnsupportedError,
+ get_task_viz_tracker,
+ )

  # Convert the call args/kwargs to a parameter dict
  parameters = get_call_parameters(self.fn, args, kwargs)
- return_type = "state" if return_state else "future"
  flow_run_context = FlowRunContext.get()

+ if not flow_run_context:
+ raise ValueError("Task.submit() must be called within a flow")
+
  task_viz_tracker = get_task_viz_tracker()
  if task_viz_tracker:
  raise VisualizationUnsupportedError(
  "`task.submit()` is not currently supported by `flow.visualize()`"
  )

- if PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING and not flow_run_context:
- create_autonomous_task_run_call = create_call(
- create_autonomous_task_run, task=self, parameters=parameters
- )
- if self.isasync:
- return from_async.wait_for_call_in_loop_thread(
- create_autonomous_task_run_call
- )
- else:
- return from_sync.wait_for_call_in_loop_thread(
- create_autonomous_task_run_call
- )
- if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE and flow_run_context:
- if self.isasync:
- return self._submit_async(
- parameters=parameters,
- flow_run_context=flow_run_context,
- wait_for=wait_for,
- return_state=return_state,
- )
- else:
- raise NotImplementedError(
- "Submitting sync tasks with the new engine has not be implemented yet."
- )
-
- else:
- return enter_task_run_engine(
- self,
- parameters=parameters,
- wait_for=wait_for,
- return_type=return_type,
- task_runner=None, # Use the flow's task runner
- mapped=False,
- )
-
- async def _submit_async(
- self,
- parameters: Dict[str, Any],
- flow_run_context: FlowRunContext,
- wait_for: Optional[Iterable[PrefectFuture]],
- return_state: bool,
- ):
- from prefect.new_task_engine import run_task_async
-
  task_runner = flow_run_context.task_runner
-
- task_run = await self.create_run(
- client=flow_run_context.client,
- flow_run_context=flow_run_context,
- parameters=parameters,
- wait_for=wait_for,
- )
-
- future = PrefectFuture(
- name=task_run.name,
- key=uuid4(),
- task_runner=task_runner,
- asynchronous=(self.isasync and flow_run_context.flow.isasync),
- )
- future.task_run = task_run
- flow_run_context.task_run_futures.append(future)
- await task_runner.submit(
- key=future.key,
- call=partial(
- run_task_async,
- task=self,
- task_run=task_run,
- parameters=parameters,
- wait_for=wait_for,
- return_type="state",
- ),
- )
- # TODO: I don't like this. Can we move responsibility for creating the future
- # and setting this anyio.Event to the task runner?
- future._submitted.set()
-
+ future = task_runner.submit(self, parameters, wait_for)
  if return_state:
- return await future.wait()
+ future.wait()
+ return future.state
  else:
  return future

@@ -993,32 +870,24 @@ class Task(Generic[P, R]):
  self: "Task[P, NoReturn]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> List[PrefectFuture[None, Sync]]:
+ ) -> List[PrefectFuture]:
  # `NoReturn` matches if a type can't be inferred for the function which stops a
  # sync function from matching the `Coroutine` overload
  ...

- @overload
- def map(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> Awaitable[List[PrefectFuture[T, Async]]]:
- ...
-
  @overload
  def map(
  self: "Task[P, T]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> List[PrefectFuture[T, Sync]]:
+ ) -> List[PrefectFuture]:
  ...

  @overload
  def map(
  self: "Task[P, T]",
- *args: P.args,
  return_state: Literal[True],
+ *args: P.args,
  **kwargs: P.kwargs,
  ) -> List[State[T]]:
  ...
@@ -1029,7 +898,7 @@ class Task(Generic[P, R]):
  return_state: bool = False,
  wait_for: Optional[Iterable[PrefectFuture]] = None,
  **kwargs: Any,
- ) -> Any:
+ ):
  """
  Submit a mapped run of the task to a worker.

@@ -1044,9 +913,7 @@ class Task(Generic[P, R]):
  backing API and submit the task runs to the flow's task runner. This
  call blocks if given a future as input while the future is resolved. It
  also blocks while the tasks are being submitted, once they are
- submitted, the flow function will continue executing. However, note
- that the `SequentialTaskRunner` does not implement parallel execution
- for sync tasks and they are fully resolved on submission.
+ submitted, the flow function will continue executing.

  Args:
  *args: Iterable and static arguments to run the tasks with
@@ -1143,12 +1010,15 @@ class Task(Generic[P, R]):
  [[11, 21], [12, 22], [13, 23]]
  """

- from prefect.engine import begin_task_map, enter_task_run_engine
+ from prefect.utilities.visualization import (
+ VisualizationUnsupportedError,
+ get_task_viz_tracker,
+ )

  # Convert the call args/kwargs to a parameter dict; do not apply defaults
  # since they should not be mapped over
  parameters = get_call_parameters(self.fn, args, kwargs, apply_defaults=False)
- return_type = "state" if return_state else "future"
+ flow_run_context = FlowRunContext.get()

  task_viz_tracker = get_task_viz_tracker()
  if task_viz_tracker:
@@ -1156,35 +1026,162 @@ class Task(Generic[P, R]):
  "`task.map()` is not currently supported by `flow.visualize()`"
  )

- if (
- PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
- and not FlowRunContext.get()
- ):
- map_call = create_call(
- begin_task_map,
- task=self,
+ if not flow_run_context:
+ # TODO: Should we split out background task mapping into a separate method
+ # like we do for the `submit`/`apply_async` split?
+ parameters_list = expand_mapping_parameters(self.fn, parameters)
+ # TODO: Make this non-blocking once we can return a list of futures
+ # instead of a list of task runs
+ return [
+ run_coro_as_sync(self.create_run(parameters=parameters, deferred=True))
+ for parameters in parameters_list
+ ]
+
+ from prefect.task_runners import TaskRunner
+
+ task_runner = flow_run_context.task_runner
+ assert isinstance(task_runner, TaskRunner)
+ futures = task_runner.map(self, parameters, wait_for)
+ if return_state:
+ states = []
+ for future in futures:
+ future.wait()
+ states.append(future.state)
+ return states
+ else:
+ return futures
+
+ def apply_async(
+ self,
+ args: Optional[Tuple[Any, ...]] = None,
+ kwargs: Optional[Dict[str, Any]] = None,
+ wait_for: Optional[Iterable[PrefectFuture]] = None,
+ dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+ ) -> PrefectDistributedFuture:
+ """
+ Create a pending task run for a task server to execute.
+
+ Args:
+ args: Arguments to run the task with
+ kwargs: Keyword arguments to run the task with
+
+ Returns:
+ A PrefectDistributedFuture object representing the pending task run
+
+ Examples:
+
+ Define a task
+
+ >>> from prefect import task
+ >>> @task
+ >>> def my_task(name: str = "world"):
+ >>> return f"hello {name}"
+
+ Create a pending task run for the task
+
+ >>> from prefect import flow
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.apply_async(("marvin",))
+
+ Wait for a task to finish
+
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.apply_async(("marvin",)).wait()
+
+
+ >>> @flow
+ >>> def my_flow():
+ >>> print(my_task.apply_async(("marvin",)).result())
+ >>>
+ >>> my_flow()
+ hello marvin
+
+ TODO: Enforce ordering between tasks that do not exchange data
+ >>> @task
+ >>> def task_1():
+ >>> pass
+ >>>
+ >>> @task
+ >>> def task_2():
+ >>> pass
+ >>>
+ >>> @flow
+ >>> def my_flow():
+ >>> x = task_1.apply_async()
+ >>>
+ >>> # task 2 will wait for task_1 to complete
+ >>> y = task_2.apply_async(wait_for=[x])
+
+ """
+ from prefect.utilities.visualization import (
+ VisualizationUnsupportedError,
+ get_task_viz_tracker,
+ )
+
+ task_viz_tracker = get_task_viz_tracker()
+ if task_viz_tracker:
+ raise VisualizationUnsupportedError(
+ "`task.apply_async()` is not currently supported by `flow.visualize()`"
+ )
+ args = args or ()
+ kwargs = kwargs or {}
+
+ # Convert the call args/kwargs to a parameter dict
+ parameters = get_call_parameters(self.fn, args, kwargs)
+
+ task_run = run_coro_as_sync(
+ self.create_run(
  parameters=parameters,
- flow_run_context=None,
+ deferred=True,
  wait_for=wait_for,
- return_type=return_type,
- task_runner=None,
- autonomous=True,
+ extra_task_inputs=dependencies,
  )
- if self.isasync:
- return from_async.wait_for_call_in_loop_thread(map_call)
- else:
- return from_sync.wait_for_call_in_loop_thread(map_call)
-
- return enter_task_run_engine(
- self,
- parameters=parameters,
- wait_for=wait_for,
- return_type=return_type,
- task_runner=None,
- mapped=True,
  )
+ return PrefectDistributedFuture(task_run_id=task_run.id)
+
+ def delay(self, *args: P.args, **kwargs: P.kwargs) -> PrefectDistributedFuture:
+ """
+ An alias for `apply_async` with simpler calling semantics.
+
+ Avoids having to use explicit "args" and "kwargs" arguments. Arguments
+ will pass through as-is to the task.
+
+ Examples:
+
+ Define a task
+
+ >>> from prefect import task
+ >>> @task
+ >>> def my_task(name: str = "world"):
+ >>> return f"hello {name}"
+
+ Create a pending task run for the task
+
+ >>> from prefect import flow
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.delay("marvin")
+
+ Wait for a task to finish
+
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.delay("marvin").wait()

- def serve(self, task_runner: Optional[BaseTaskRunner] = None) -> "Task":
+ Use the result from a task in a flow
+
+ >>> @flow
+ >>> def my_flow():
+ >>> print(my_task.delay("marvin").result())
+ >>>
+ >>> my_flow()
+ hello marvin
+ """
+ return self.apply_async(args=args, kwargs=kwargs)
+
+ def serve(self, task_runner: Optional["BaseTaskRunner"] = None) -> "Task":
  """Serve the task using the provided task runner. This method is used to
  establish a websocket connection with the Prefect server and listen for
  submitted task runs to execute.
@@ -1201,13 +1198,6 @@ class Task(Generic[P, R]):

  >>> my_task.serve()
  """
-
- if not PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING:
- raise ValueError(
- "Task's `serve` method is an experimental feature and must be enabled with "
- "`prefect config set PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING=True`"
- )
-
  from prefect.task_server import serve

  serve(self, task_runner=task_runner)
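
Note on the new hook-registration methods: the diff above adds `Task.on_completion`, `Task.on_failure`, `Task.on_commit`, and `Task.on_rollback`, each of which appends the given callable to the corresponding hook list and returns it, so they can also be used as decorators. A minimal sketch of that usage, based only on the signatures shown in this diff; the task and hook functions below are illustrative, not part of the package:

    from prefect import task
    from prefect.transactions import Transaction


    @task
    def load_data(path: str) -> str:
        return f"loaded {path}"


    @load_data.on_completion
    def notify_success(tsk, task_run, state):
        # per the annotations above, completion hooks receive (Task, TaskRun, State)
        print(f"{tsk.name} finished in state {state.name}")


    @load_data.on_rollback
    def undo_load(txn: Transaction):
        # per the docstring above, rollback hooks receive the Transaction when the task rolls back
        print("rolling back load_data")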