prefect-client 2.19.4__py3-none-any.whl → 3.0.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (242)
  1. prefect/__init__.py +8 -56
  2. prefect/_internal/compatibility/deprecated.py +6 -115
  3. prefect/_internal/compatibility/experimental.py +4 -79
  4. prefect/_internal/concurrency/api.py +0 -34
  5. prefect/_internal/concurrency/calls.py +0 -6
  6. prefect/_internal/concurrency/cancellation.py +0 -3
  7. prefect/_internal/concurrency/event_loop.py +0 -20
  8. prefect/_internal/concurrency/inspection.py +3 -3
  9. prefect/_internal/concurrency/threads.py +35 -0
  10. prefect/_internal/concurrency/waiters.py +0 -28
  11. prefect/_internal/pydantic/__init__.py +0 -45
  12. prefect/_internal/pydantic/v1_schema.py +21 -22
  13. prefect/_internal/pydantic/v2_schema.py +0 -2
  14. prefect/_internal/pydantic/v2_validated_func.py +18 -23
  15. prefect/_internal/schemas/bases.py +44 -177
  16. prefect/_internal/schemas/fields.py +1 -43
  17. prefect/_internal/schemas/validators.py +60 -158
  18. prefect/artifacts.py +161 -14
  19. prefect/automations.py +39 -4
  20. prefect/blocks/abstract.py +1 -1
  21. prefect/blocks/core.py +268 -148
  22. prefect/blocks/fields.py +2 -57
  23. prefect/blocks/kubernetes.py +8 -12
  24. prefect/blocks/notifications.py +40 -20
  25. prefect/blocks/redis.py +168 -0
  26. prefect/blocks/system.py +22 -11
  27. prefect/blocks/webhook.py +2 -9
  28. prefect/client/base.py +4 -4
  29. prefect/client/cloud.py +8 -13
  30. prefect/client/orchestration.py +362 -340
  31. prefect/client/schemas/actions.py +92 -86
  32. prefect/client/schemas/filters.py +20 -40
  33. prefect/client/schemas/objects.py +158 -152
  34. prefect/client/schemas/responses.py +16 -24
  35. prefect/client/schemas/schedules.py +47 -35
  36. prefect/client/subscriptions.py +2 -2
  37. prefect/client/utilities.py +5 -2
  38. prefect/concurrency/asyncio.py +4 -2
  39. prefect/concurrency/events.py +1 -1
  40. prefect/concurrency/services.py +7 -4
  41. prefect/context.py +195 -27
  42. prefect/deployments/__init__.py +5 -6
  43. prefect/deployments/base.py +7 -5
  44. prefect/deployments/flow_runs.py +185 -0
  45. prefect/deployments/runner.py +50 -45
  46. prefect/deployments/schedules.py +28 -23
  47. prefect/deployments/steps/__init__.py +0 -1
  48. prefect/deployments/steps/core.py +1 -0
  49. prefect/deployments/steps/pull.py +7 -21
  50. prefect/engine.py +12 -2422
  51. prefect/events/actions.py +17 -23
  52. prefect/events/cli/automations.py +19 -6
  53. prefect/events/clients.py +14 -37
  54. prefect/events/filters.py +14 -18
  55. prefect/events/related.py +2 -2
  56. prefect/events/schemas/__init__.py +0 -5
  57. prefect/events/schemas/automations.py +55 -46
  58. prefect/events/schemas/deployment_triggers.py +7 -197
  59. prefect/events/schemas/events.py +36 -65
  60. prefect/events/schemas/labelling.py +10 -14
  61. prefect/events/utilities.py +2 -3
  62. prefect/events/worker.py +2 -3
  63. prefect/filesystems.py +6 -517
  64. prefect/{new_flow_engine.py → flow_engine.py} +315 -74
  65. prefect/flow_runs.py +379 -7
  66. prefect/flows.py +248 -165
  67. prefect/futures.py +187 -345
  68. prefect/infrastructure/__init__.py +0 -27
  69. prefect/infrastructure/provisioners/__init__.py +5 -3
  70. prefect/infrastructure/provisioners/cloud_run.py +11 -6
  71. prefect/infrastructure/provisioners/container_instance.py +11 -7
  72. prefect/infrastructure/provisioners/ecs.py +6 -4
  73. prefect/infrastructure/provisioners/modal.py +8 -5
  74. prefect/input/actions.py +2 -4
  75. prefect/input/run_input.py +9 -9
  76. prefect/logging/formatters.py +0 -2
  77. prefect/logging/handlers.py +3 -11
  78. prefect/logging/loggers.py +2 -2
  79. prefect/manifests.py +2 -1
  80. prefect/records/__init__.py +1 -0
  81. prefect/records/cache_policies.py +179 -0
  82. prefect/records/result_store.py +42 -0
  83. prefect/records/store.py +9 -0
  84. prefect/results.py +43 -39
  85. prefect/runner/runner.py +9 -9
  86. prefect/runner/server.py +6 -10
  87. prefect/runner/storage.py +3 -8
  88. prefect/runner/submit.py +2 -2
  89. prefect/runner/utils.py +2 -2
  90. prefect/serializers.py +24 -35
  91. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
  92. prefect/settings.py +76 -136
  93. prefect/states.py +22 -50
  94. prefect/task_engine.py +666 -56
  95. prefect/task_runners.py +272 -300
  96. prefect/task_runs.py +203 -0
  97. prefect/{task_server.py → task_worker.py} +89 -60
  98. prefect/tasks.py +358 -341
  99. prefect/transactions.py +224 -0
  100. prefect/types/__init__.py +61 -82
  101. prefect/utilities/asyncutils.py +195 -136
  102. prefect/utilities/callables.py +121 -41
  103. prefect/utilities/collections.py +23 -38
  104. prefect/utilities/dispatch.py +11 -3
  105. prefect/utilities/dockerutils.py +4 -0
  106. prefect/utilities/engine.py +140 -20
  107. prefect/utilities/importtools.py +26 -27
  108. prefect/utilities/pydantic.py +128 -38
  109. prefect/utilities/schema_tools/hydration.py +5 -1
  110. prefect/utilities/templating.py +12 -2
  111. prefect/variables.py +84 -62
  112. prefect/workers/__init__.py +0 -1
  113. prefect/workers/base.py +26 -18
  114. prefect/workers/process.py +3 -8
  115. prefect/workers/server.py +2 -2
  116. {prefect_client-2.19.4.dist-info → prefect_client-3.0.0rc2.dist-info}/METADATA +23 -21
  117. prefect_client-3.0.0rc2.dist-info/RECORD +179 -0
  118. prefect/_internal/pydantic/_base_model.py +0 -51
  119. prefect/_internal/pydantic/_compat.py +0 -82
  120. prefect/_internal/pydantic/_flags.py +0 -20
  121. prefect/_internal/pydantic/_types.py +0 -8
  122. prefect/_internal/pydantic/utilities/__init__.py +0 -0
  123. prefect/_internal/pydantic/utilities/config_dict.py +0 -72
  124. prefect/_internal/pydantic/utilities/field_validator.py +0 -150
  125. prefect/_internal/pydantic/utilities/model_construct.py +0 -56
  126. prefect/_internal/pydantic/utilities/model_copy.py +0 -55
  127. prefect/_internal/pydantic/utilities/model_dump.py +0 -136
  128. prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
  129. prefect/_internal/pydantic/utilities/model_fields.py +0 -50
  130. prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
  131. prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
  132. prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
  133. prefect/_internal/pydantic/utilities/model_validate.py +0 -75
  134. prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
  135. prefect/_internal/pydantic/utilities/model_validator.py +0 -87
  136. prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
  137. prefect/_vendor/__init__.py +0 -0
  138. prefect/_vendor/fastapi/__init__.py +0 -25
  139. prefect/_vendor/fastapi/applications.py +0 -946
  140. prefect/_vendor/fastapi/background.py +0 -3
  141. prefect/_vendor/fastapi/concurrency.py +0 -44
  142. prefect/_vendor/fastapi/datastructures.py +0 -58
  143. prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
  144. prefect/_vendor/fastapi/dependencies/models.py +0 -64
  145. prefect/_vendor/fastapi/dependencies/utils.py +0 -877
  146. prefect/_vendor/fastapi/encoders.py +0 -177
  147. prefect/_vendor/fastapi/exception_handlers.py +0 -40
  148. prefect/_vendor/fastapi/exceptions.py +0 -46
  149. prefect/_vendor/fastapi/logger.py +0 -3
  150. prefect/_vendor/fastapi/middleware/__init__.py +0 -1
  151. prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
  152. prefect/_vendor/fastapi/middleware/cors.py +0 -3
  153. prefect/_vendor/fastapi/middleware/gzip.py +0 -3
  154. prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
  155. prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
  156. prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
  157. prefect/_vendor/fastapi/openapi/__init__.py +0 -0
  158. prefect/_vendor/fastapi/openapi/constants.py +0 -2
  159. prefect/_vendor/fastapi/openapi/docs.py +0 -203
  160. prefect/_vendor/fastapi/openapi/models.py +0 -480
  161. prefect/_vendor/fastapi/openapi/utils.py +0 -485
  162. prefect/_vendor/fastapi/param_functions.py +0 -340
  163. prefect/_vendor/fastapi/params.py +0 -453
  164. prefect/_vendor/fastapi/requests.py +0 -4
  165. prefect/_vendor/fastapi/responses.py +0 -40
  166. prefect/_vendor/fastapi/routing.py +0 -1331
  167. prefect/_vendor/fastapi/security/__init__.py +0 -15
  168. prefect/_vendor/fastapi/security/api_key.py +0 -98
  169. prefect/_vendor/fastapi/security/base.py +0 -6
  170. prefect/_vendor/fastapi/security/http.py +0 -172
  171. prefect/_vendor/fastapi/security/oauth2.py +0 -227
  172. prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
  173. prefect/_vendor/fastapi/security/utils.py +0 -10
  174. prefect/_vendor/fastapi/staticfiles.py +0 -1
  175. prefect/_vendor/fastapi/templating.py +0 -3
  176. prefect/_vendor/fastapi/testclient.py +0 -1
  177. prefect/_vendor/fastapi/types.py +0 -3
  178. prefect/_vendor/fastapi/utils.py +0 -235
  179. prefect/_vendor/fastapi/websockets.py +0 -7
  180. prefect/_vendor/starlette/__init__.py +0 -1
  181. prefect/_vendor/starlette/_compat.py +0 -28
  182. prefect/_vendor/starlette/_exception_handler.py +0 -80
  183. prefect/_vendor/starlette/_utils.py +0 -88
  184. prefect/_vendor/starlette/applications.py +0 -261
  185. prefect/_vendor/starlette/authentication.py +0 -159
  186. prefect/_vendor/starlette/background.py +0 -43
  187. prefect/_vendor/starlette/concurrency.py +0 -59
  188. prefect/_vendor/starlette/config.py +0 -151
  189. prefect/_vendor/starlette/convertors.py +0 -87
  190. prefect/_vendor/starlette/datastructures.py +0 -707
  191. prefect/_vendor/starlette/endpoints.py +0 -130
  192. prefect/_vendor/starlette/exceptions.py +0 -60
  193. prefect/_vendor/starlette/formparsers.py +0 -276
  194. prefect/_vendor/starlette/middleware/__init__.py +0 -17
  195. prefect/_vendor/starlette/middleware/authentication.py +0 -52
  196. prefect/_vendor/starlette/middleware/base.py +0 -220
  197. prefect/_vendor/starlette/middleware/cors.py +0 -176
  198. prefect/_vendor/starlette/middleware/errors.py +0 -265
  199. prefect/_vendor/starlette/middleware/exceptions.py +0 -74
  200. prefect/_vendor/starlette/middleware/gzip.py +0 -113
  201. prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
  202. prefect/_vendor/starlette/middleware/sessions.py +0 -82
  203. prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
  204. prefect/_vendor/starlette/middleware/wsgi.py +0 -147
  205. prefect/_vendor/starlette/requests.py +0 -328
  206. prefect/_vendor/starlette/responses.py +0 -347
  207. prefect/_vendor/starlette/routing.py +0 -933
  208. prefect/_vendor/starlette/schemas.py +0 -154
  209. prefect/_vendor/starlette/staticfiles.py +0 -248
  210. prefect/_vendor/starlette/status.py +0 -199
  211. prefect/_vendor/starlette/templating.py +0 -231
  212. prefect/_vendor/starlette/testclient.py +0 -804
  213. prefect/_vendor/starlette/types.py +0 -30
  214. prefect/_vendor/starlette/websockets.py +0 -193
  215. prefect/agent.py +0 -698
  216. prefect/deployments/deployments.py +0 -1042
  217. prefect/deprecated/__init__.py +0 -0
  218. prefect/deprecated/data_documents.py +0 -350
  219. prefect/deprecated/packaging/__init__.py +0 -12
  220. prefect/deprecated/packaging/base.py +0 -96
  221. prefect/deprecated/packaging/docker.py +0 -146
  222. prefect/deprecated/packaging/file.py +0 -92
  223. prefect/deprecated/packaging/orion.py +0 -80
  224. prefect/deprecated/packaging/serializers.py +0 -171
  225. prefect/events/instrument.py +0 -135
  226. prefect/infrastructure/base.py +0 -323
  227. prefect/infrastructure/container.py +0 -818
  228. prefect/infrastructure/kubernetes.py +0 -920
  229. prefect/infrastructure/process.py +0 -289
  230. prefect/new_task_engine.py +0 -423
  231. prefect/pydantic/__init__.py +0 -76
  232. prefect/pydantic/main.py +0 -39
  233. prefect/software/__init__.py +0 -2
  234. prefect/software/base.py +0 -50
  235. prefect/software/conda.py +0 -199
  236. prefect/software/pip.py +0 -122
  237. prefect/software/python.py +0 -52
  238. prefect/workers/block.py +0 -218
  239. prefect_client-2.19.4.dist-info/RECORD +0 -292
  240. {prefect_client-2.19.4.dist-info → prefect_client-3.0.0rc2.dist-info}/LICENSE +0 -0
  241. {prefect_client-2.19.4.dist-info → prefect_client-3.0.0rc2.dist-info}/WHEEL +0 -0
  242. {prefect_client-2.19.4.dist-info → prefect_client-3.0.0rc2.dist-info}/top_level.txt +0 -0
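The largest user-facing change in this diff is to prefect/tasks.py, shown below: the Task class gains a cache_policy option, transaction hooks (on_commit / on_rollback), and hook-registration methods that double as decorators. A minimal sketch of how those additions could be used, assuming the 3.0.0rc2 client is installed; the names are taken from the signatures in the diff below, and the example itself is illustrative rather than part of the package:

    from prefect import task
    from prefect.records.cache_policies import DEFAULT

    # opt into the new cache-policy machinery instead of a cache_key_fn
    @task(cache_policy=DEFAULT)
    def add(x: int, y: int) -> int:
        return x + y

    # the hook-registration methods added in 3.0 return the function,
    # so they can be applied as decorators
    @add.on_completion
    def log_completion(tsk, task_run, state):
        print(f"{tsk.name} finished in state {state.name}")

    @add.on_rollback
    def undo(transaction):
        print("transaction rolled back")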
prefect/tasks.py CHANGED
@@ -14,7 +14,6 @@ from typing import (
  Any,
  Awaitable,
  Callable,
- Coroutine,
  Dict,
  Generic,
  Iterable,
@@ -22,17 +21,17 @@ from typing import (
  NoReturn,
  Optional,
  Set,
+ Tuple,
  TypeVar,
  Union,
  cast,
  overload,
  )
- from uuid import uuid4
+ from uuid import UUID, uuid4

  from typing_extensions import Literal, ParamSpec

- from prefect._internal.concurrency.api import create_call, from_async, from_sync
- from prefect.client.orchestration import PrefectClient, SyncPrefectClient
+ from prefect.client.orchestration import get_client
  from prefect.client.schemas import TaskRun
  from prefect.client.schemas.objects import TaskRunInput, TaskRunResult
  from prefect.context import (
@@ -40,40 +39,38 @@ from prefect.context import (
  PrefectObjectRegistry,
  TagsContext,
  TaskRunContext,
+ serialize_context,
  )
- from prefect.futures import PrefectFuture
- from prefect.logging.loggers import get_logger, get_run_logger
- from prefect.results import ResultSerializer, ResultStorage
+ from prefect.futures import PrefectDistributedFuture, PrefectFuture
+ from prefect.logging.loggers import get_logger
+ from prefect.records.cache_policies import DEFAULT, CachePolicy
+ from prefect.results import ResultFactory, ResultSerializer, ResultStorage
  from prefect.settings import (
- PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE,
- PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING,
  PREFECT_TASK_DEFAULT_RETRIES,
  PREFECT_TASK_DEFAULT_RETRY_DELAY_SECONDS,
  )
- from prefect.states import Pending, State
- from prefect.task_runners import BaseTaskRunner
+ from prefect.states import Pending, Scheduled, State
  from prefect.utilities.annotations import NotSet
- from prefect.utilities.asyncutils import Async, Sync
+ from prefect.utilities.asyncutils import run_coro_as_sync
  from prefect.utilities.callables import (
+ expand_mapping_parameters,
  get_call_parameters,
  raise_for_reserved_arguments,
  )
  from prefect.utilities.hashing import hash_objects
  from prefect.utilities.importtools import to_qualified_name
- from prefect.utilities.visualization import (
- VisualizationUnsupportedError,
- get_task_viz_tracker,
- track_viz_task,
- )

  if TYPE_CHECKING:
+ from prefect.client.orchestration import PrefectClient
  from prefect.context import TaskRunContext
-
+ from prefect.transactions import Transaction

  T = TypeVar("T") # Generic type var for capturing the inner return type of async funcs
  R = TypeVar("R") # The return type of the user's function
  P = ParamSpec("P") # The parameters of the task

+ NUM_CHARS_DYNAMIC_KEY = 8
+
  logger = get_logger("tasks")


@@ -148,6 +145,7 @@ class Task(Generic[P, R]):
  tags are combined with any tags defined by a `prefect.tags` context at
  task runtime.
  version: An optional string specifying the version of this task definition
+ cache_policy: A cache policy that determines the level of caching for this task
  cache_key_fn: An optional callable that, given the task run context and call
  parameters, generates a string key; if the key matches a previous completed
  state, that state result will be restored instead of running the task again.
@@ -189,6 +187,8 @@ class Task(Generic[P, R]):
  execution with matching cache key is used.
  on_failure: An optional list of callables to run when the task enters a failed state.
  on_completion: An optional list of callables to run when the task enters a completed state.
+ on_commit: An optional list of callables to run when the task's idempotency record is committed.
+ on_rollback: An optional list of callables to run when the task rolls back.
  retry_condition_fn: An optional callable run when a task run returns a Failed state. Should
  return `True` if the task should continue to its retry policy (e.g. `retries=3`), and `False` if the task
  should end as failed. Defaults to `None`, indicating the task should always continue
@@ -205,6 +205,7 @@ class Task(Generic[P, R]):
  description: Optional[str] = None,
  tags: Optional[Iterable[str]] = None,
  version: Optional[str] = None,
+ cache_policy: Optional[CachePolicy] = NotSet,
  cache_key_fn: Optional[
  Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
  ] = None,
@@ -230,6 +231,8 @@ class Task(Generic[P, R]):
  refresh_cache: Optional[bool] = None,
  on_completion: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
  on_failure: Optional[List[Callable[["Task", TaskRun, State], None]]] = None,
+ on_rollback: Optional[List[Callable[["Transaction"], None]]] = None,
+ on_commit: Optional[List[Callable[["Transaction"], None]]] = None,
  retry_condition_fn: Optional[Callable[["Task", TaskRun, State], bool]] = None,
  viz_return_value: Optional[Any] = None,
  ):
@@ -238,8 +241,6 @@ class Task(Generic[P, R]):
  hook_names = ["on_completion", "on_failure"]
  for hooks, hook_name in zip(hook_categories, hook_names):
  if hooks is not None:
- if not hooks:
- raise ValueError(f"Empty list passed for '{hook_name}'")
  try:
  hooks = list(hooks)
  except TypeError:
@@ -247,8 +248,8 @@ class Task(Generic[P, R]):
  f"Expected iterable for '{hook_name}'; got"
  f" {type(hooks).__name__} instead. Please provide a list of"
  f" hooks to '{hook_name}':\n\n"
- f"@flow({hook_name}=[hook1, hook2])\ndef"
- " my_flow():\n\tpass"
+ f"@task({hook_name}=[hook1, hook2])\ndef"
+ " my_task():\n\tpass"
  )

  for hook in hooks:
@@ -257,8 +258,8 @@ class Task(Generic[P, R]):
  f"Expected callables in '{hook_name}'; got"
  f" {type(hook).__name__} instead. Please provide a list of"
  f" hooks to '{hook_name}':\n\n"
- f"@flow({hook_name}=[hook1, hook2])\ndef"
- " my_flow():\n\tpass"
+ f"@task({hook_name}=[hook1, hook2])\ndef"
+ " my_task():\n\tpass"
  )

  if not callable(fn):
@@ -304,10 +305,23 @@ class Task(Generic[P, R]):

  self.task_key = f"{self.fn.__qualname__}-{task_origin_hash}"

+ # TODO: warn of precedence of cache policies and cache key fn if both provided?
+ if cache_key_fn:
+ cache_policy = CachePolicy.from_cache_key_fn(cache_key_fn)
+
+ # TODO: manage expiration and cache refresh
  self.cache_key_fn = cache_key_fn
  self.cache_expiration = cache_expiration
  self.refresh_cache = refresh_cache

+ if cache_policy is NotSet and result_storage_key is None:
+ self.cache_policy = DEFAULT
+ elif result_storage_key:
+ # TODO: handle this situation with double storage
+ self.cache_policy = None
+ else:
+ self.cache_policy = cache_policy
+
  # TaskRunPolicy settings
  # TODO: We can instantiate a `TaskRunPolicy` and add Pydantic bound checks to
  # validate that the user passes positive numbers here
@@ -338,8 +352,10 @@ class Task(Generic[P, R]):
  self.result_storage_key = result_storage_key
  self.cache_result_in_memory = cache_result_in_memory
  self.timeout_seconds = float(timeout_seconds) if timeout_seconds else None
- self.on_completion = on_completion
- self.on_failure = on_failure
+ self.on_rollback_hooks = on_rollback or []
+ self.on_commit_hooks = on_commit or []
+ self.on_completion_hooks = on_completion or []
+ self.on_failure_hooks = on_failure or []

  # retry_condition_fn must be a callable or None. If it is neither, raise a TypeError
  if retry_condition_fn is not None and not (callable(retry_condition_fn)):
@@ -357,6 +373,7 @@ class Task(Generic[P, R]):
  name: str = None,
  description: str = None,
  tags: Iterable[str] = None,
+ cache_policy: CachePolicy = NotSet,
  cache_key_fn: Callable[
  ["TaskRunContext", Dict[str, Any]], Optional[str]
  ] = None,
@@ -468,6 +485,9 @@ class Task(Generic[P, R]):
  name=name or self.name,
  description=description or self.description,
  tags=tags or copy(self.tags),
+ cache_policy=cache_policy
+ if cache_policy is not NotSet
+ else self.cache_policy,
  cache_key_fn=cache_key_fn or self.cache_key_fn,
  cache_expiration=cache_expiration or self.cache_expiration,
  task_run_name=task_run_name,
@@ -510,25 +530,50 @@ class Task(Generic[P, R]):
  refresh_cache=(
  refresh_cache if refresh_cache is not NotSet else self.refresh_cache
  ),
- on_completion=on_completion or self.on_completion,
- on_failure=on_failure or self.on_failure,
+ on_completion=on_completion or self.on_completion_hooks,
+ on_failure=on_failure or self.on_failure_hooks,
  retry_condition_fn=retry_condition_fn or self.retry_condition_fn,
  viz_return_value=viz_return_value or self.viz_return_value,
  )

+ def on_completion(
+ self, fn: Callable[["Task", TaskRun, State], None]
+ ) -> Callable[["Task", TaskRun, State], None]:
+ self.on_completion_hooks.append(fn)
+ return fn
+
+ def on_failure(
+ self, fn: Callable[["Task", TaskRun, State], None]
+ ) -> Callable[["Task", TaskRun, State], None]:
+ self.on_failure_hooks.append(fn)
+ return fn
+
+ def on_commit(
+ self, fn: Callable[["Transaction"], None]
+ ) -> Callable[["Transaction"], None]:
+ self.on_commit_hooks.append(fn)
+ return fn
+
+ def on_rollback(
+ self, fn: Callable[["Transaction"], None]
+ ) -> Callable[["Transaction"], None]:
+ self.on_rollback_hooks.append(fn)
+ return fn
+
  async def create_run(
  self,
- client: Optional[Union[PrefectClient, SyncPrefectClient]],
- parameters: Dict[str, Any] = None,
+ client: Optional["PrefectClient"] = None,
+ id: Optional[UUID] = None,
+ parameters: Optional[Dict[str, Any]] = None,
  flow_run_context: Optional[FlowRunContext] = None,
  parent_task_run_context: Optional[TaskRunContext] = None,
  wait_for: Optional[Iterable[PrefectFuture]] = None,
  extra_task_inputs: Optional[Dict[str, Set[TaskRunInput]]] = None,
+ deferred: bool = False,
  ) -> TaskRun:
  from prefect.utilities.engine import (
  _dynamic_key_for_task_run,
- _resolve_custom_task_run_name,
- collect_task_run_inputs,
+ collect_task_run_inputs_sync,
  )

  if flow_run_context is None:
@@ -537,76 +582,96 @@ class Task(Generic[P, R]):
  parent_task_run_context = TaskRunContext.get()
  if parameters is None:
  parameters = {}
+ if client is None:
+ client = get_client()

- try:
- task_run_name = _resolve_custom_task_run_name(self, parameters)
- except TypeError:
- task_run_name = None
-
- if flow_run_context:
- dynamic_key = _dynamic_key_for_task_run(context=flow_run_context, task=self)
- else:
- dynamic_key = uuid4().hex
-
- # collect task inputs
- task_inputs = {
- k: await collect_task_run_inputs(v) for k, v in parameters.items()
- }
-
- # check if this task has a parent task run based on running in another
- # task run's existing context. A task run is only considered a parent if
- # it is in the same flow run (because otherwise presumably the child is
- # in a subflow, so the subflow serves as the parent) or if there is no
- # flow run
- if parent_task_run_context:
- # there is no flow run
+ async with client:
  if not flow_run_context:
- task_inputs["__parents__"] = [
- TaskRunResult(id=parent_task_run_context.task_run.id)
- ]
- # there is a flow run and the task run is in the same flow run
- elif (
- flow_run_context
- and parent_task_run_context.task_run.flow_run_id
- == flow_run_context.flow_run.id
- ):
- task_inputs["__parents__"] = [
- TaskRunResult(id=parent_task_run_context.task_run.id)
- ]
-
- if wait_for:
- task_inputs["wait_for"] = await collect_task_run_inputs(wait_for)
-
- # Join extra task inputs
- for k, extras in (extra_task_inputs or {}).items():
- task_inputs[k] = task_inputs[k].union(extras)
-
- # create the task run
- task_run = client.create_task_run(
- task=self,
- name=task_run_name,
- flow_run_id=(
- getattr(flow_run_context.flow_run, "id", None)
- if flow_run_context and flow_run_context.flow_run
- else None
- ),
- dynamic_key=str(dynamic_key),
- state=Pending(),
- task_inputs=task_inputs,
- extra_tags=TagsContext.get().current_tags,
- )
- # the new engine uses sync clients but old engines use async clients
- if inspect.isawaitable(task_run):
- task_run = await task_run
+ dynamic_key = f"{self.task_key}-{str(uuid4().hex)}"
+ task_run_name = f"{self.name}-{dynamic_key[:NUM_CHARS_DYNAMIC_KEY]}"
+ else:
+ dynamic_key = _dynamic_key_for_task_run(
+ context=flow_run_context, task=self
+ )
+ task_run_name = f"{self.name}-{dynamic_key}"

- if flow_run_context and flow_run_context.flow_run:
- get_run_logger(flow_run_context).debug(
- f"Created task run {task_run.name!r} for task {self.name!r}"
+ if deferred:
+ state = Scheduled()
+ state.state_details.deferred = True
+ else:
+ state = Pending()
+
+ # store parameters for background tasks so that task worker
+ # can retrieve them at runtime
+ if deferred and (parameters or wait_for):
+ parameters_id = uuid4()
+ state.state_details.task_parameters_id = parameters_id
+
+ # TODO: Improve use of result storage for parameter storage / reference
+ self.persist_result = True
+
+ factory = await ResultFactory.from_autonomous_task(self, client=client)
+ context = serialize_context()
+ data: Dict[str, Any] = {"context": context}
+ if parameters:
+ data["parameters"] = parameters
+ if wait_for:
+ data["wait_for"] = wait_for
+ await factory.store_parameters(parameters_id, data)
+
+ # collect task inputs
+ task_inputs = {
+ k: collect_task_run_inputs_sync(v) for k, v in parameters.items()
+ }
+
+ # check if this task has a parent task run based on running in another
+ # task run's existing context. A task run is only considered a parent if
+ # it is in the same flow run (because otherwise presumably the child is
+ # in a subflow, so the subflow serves as the parent) or if there is no
+ # flow run
+ if parent_task_run_context:
+ # there is no flow run
+ if not flow_run_context:
+ task_inputs["__parents__"] = [
+ TaskRunResult(id=parent_task_run_context.task_run.id)
+ ]
+ # there is a flow run and the task run is in the same flow run
+ elif (
+ flow_run_context
+ and parent_task_run_context.task_run.flow_run_id
+ == getattr(flow_run_context.flow_run, "id", None)
+ ):
+ task_inputs["__parents__"] = [
+ TaskRunResult(id=parent_task_run_context.task_run.id)
+ ]
+
+ if wait_for:
+ task_inputs["wait_for"] = collect_task_run_inputs_sync(wait_for)
+
+ # Join extra task inputs
+ for k, extras in (extra_task_inputs or {}).items():
+ task_inputs[k] = task_inputs[k].union(extras)
+
+ # create the task run
+ task_run = client.create_task_run(
+ task=self,
+ name=task_run_name,
+ flow_run_id=(
+ getattr(flow_run_context.flow_run, "id", None)
+ if flow_run_context and flow_run_context.flow_run
+ else None
+ ),
+ dynamic_key=str(dynamic_key),
+ id=id,
+ state=state,
+ task_inputs=task_inputs,
+ extra_tags=TagsContext.get().current_tags,
  )
- else:
- logger.debug(f"Created task run {task_run.name!r} for task {self.name!r}")
+ # the new engine uses sync clients but old engines use async clients
+ if inspect.isawaitable(task_run):
+ task_run = await task_run

- return task_run
+ return task_run

  @overload
  def __call__(
@@ -646,9 +711,10 @@ class Task(Generic[P, R]):
  Run the task and return the result. If `return_state` is True returns
  the result is wrapped in a Prefect State which provides error handling.
  """
- from prefect.engine import enter_task_run_engine
- from prefect.task_engine import submit_autonomous_task_run_to_engine
- from prefect.task_runners import SequentialTaskRunner
+ from prefect.utilities.visualization import (
+ get_task_viz_tracker,
+ track_viz_task,
+ )

  # Convert the call args/kwargs to a parameter dict
  parameters = get_call_parameters(self.fn, args, kwargs)
@@ -661,88 +727,13 @@ class Task(Generic[P, R]):
  self.isasync, self.name, parameters, self.viz_return_value
  )

- if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE.value():
- from prefect.new_task_engine import run_task
-
- return run_task(
- task=self,
- parameters=parameters,
- wait_for=wait_for,
- return_type=return_type,
- )
-
- if (
- PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
- and not FlowRunContext.get()
- ):
- from prefect import get_client
+ from prefect.task_engine import run_task

- return submit_autonomous_task_run_to_engine(
- task=self,
- task_run=None,
- task_runner=SequentialTaskRunner(),
- parameters=parameters,
- return_type=return_type,
- client=get_client(),
- )
-
- return enter_task_run_engine(
- self,
+ return run_task(
+ task=self,
  parameters=parameters,
  wait_for=wait_for,
- task_runner=SequentialTaskRunner(),
  return_type=return_type,
- mapped=False,
- )
-
- @overload
- def _run(
- self: "Task[P, NoReturn]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> PrefectFuture[None, Sync]:
- # `NoReturn` matches if a type can't be inferred for the function which stops a
- # sync function from matching the `Coroutine` overload
- ...
-
- @overload
- def _run(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> Awaitable[State[T]]:
- ...
-
- @overload
- def _run(
- self: "Task[P, T]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> State[T]:
- ...
-
- def _run(
- self,
- *args: P.args,
- wait_for: Optional[Iterable[PrefectFuture]] = None,
- **kwargs: P.kwargs,
- ) -> Union[State, Awaitable[State]]:
- """
- Run the task and return the final state.
- """
- from prefect.engine import enter_task_run_engine
- from prefect.task_runners import SequentialTaskRunner
-
- # Convert the call args/kwargs to a parameter dict
- parameters = get_call_parameters(self.fn, args, kwargs)
-
- return enter_task_run_engine(
- self,
- parameters=parameters,
- wait_for=wait_for,
- return_type="state",
- task_runner=SequentialTaskRunner(),
- mapped=False,
  )

  @overload
@@ -750,50 +741,27 @@ class Task(Generic[P, R]):
  self: "Task[P, NoReturn]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> PrefectFuture[None, Sync]:
+ ) -> PrefectFuture:
  # `NoReturn` matches if a type can't be inferred for the function which stops a
  # sync function from matching the `Coroutine` overload
  ...

- @overload
- def submit(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> Awaitable[PrefectFuture[T, Async]]:
- ...
-
  @overload
  def submit(
  self: "Task[P, T]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> PrefectFuture[T, Sync]:
+ ) -> PrefectFuture:
  ...

  @overload
  def submit(
  self: "Task[P, T]",
- *args: P.args,
  return_state: Literal[True],
- **kwargs: P.kwargs,
- ) -> State[T]:
- ...
-
- @overload
- def submit(
- self: "Task[P, T]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> TaskRun:
- ...
-
- @overload
- def submit(
- self: "Task[P, Coroutine[Any, Any, T]]",
+ wait_for: Optional[Iterable[PrefectFuture]] = None,
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> Awaitable[TaskRun]:
+ ) -> State[T]:
  ...

  def submit(
@@ -802,19 +770,13 @@ class Task(Generic[P, R]):
  return_state: bool = False,
  wait_for: Optional[Iterable[PrefectFuture]] = None,
  **kwargs: Any,
- ) -> Union[PrefectFuture, Awaitable[PrefectFuture], TaskRun, Awaitable[TaskRun]]:
+ ):
  """
  Submit a run of the task to the engine.

- If writing an async task, this call must be awaited.
-
- If called from within a flow function,
-
  Will create a new task run in the backing API and submit the task to the flow's
  task runner. This call only blocks execution while the task is being submitted,
- once it is submitted, the flow function will continue executing. However, note
- that the `SequentialTaskRunner` does not implement parallel execution for sync tasks
- and they are fully resolved on submission.
+ once it is submitted, the flow function will continue executing.

  Args:
  *args: Arguments to run the task with
@@ -894,97 +856,33 @@ class Task(Generic[P, R]):

  """

- from prefect.engine import create_autonomous_task_run, enter_task_run_engine
+ from prefect.utilities.visualization import (
+ VisualizationUnsupportedError,
+ get_task_viz_tracker,
+ )

  # Convert the call args/kwargs to a parameter dict
  parameters = get_call_parameters(self.fn, args, kwargs)
- return_type = "state" if return_state else "future"
  flow_run_context = FlowRunContext.get()

+ if not flow_run_context:
+ raise RuntimeError(
+ "Unable to determine task runner to use for submission. If you are"
+ " submitting a task outside of a flow, please use `.delay`"
+ " to submit the task run for deferred execution."
+ )
+
  task_viz_tracker = get_task_viz_tracker()
  if task_viz_tracker:
  raise VisualizationUnsupportedError(
  "`task.submit()` is not currently supported by `flow.visualize()`"
  )

- if PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING and not flow_run_context:
- create_autonomous_task_run_call = create_call(
- create_autonomous_task_run, task=self, parameters=parameters
- )
- if self.isasync:
- return from_async.wait_for_call_in_loop_thread(
- create_autonomous_task_run_call
- )
- else:
- return from_sync.wait_for_call_in_loop_thread(
- create_autonomous_task_run_call
- )
- if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE and flow_run_context:
- if self.isasync:
- return self._submit_async(
- parameters=parameters,
- flow_run_context=flow_run_context,
- wait_for=wait_for,
- return_state=return_state,
- )
- else:
- raise NotImplementedError(
- "Submitting sync tasks with the new engine has not be implemented yet."
- )
-
- else:
- return enter_task_run_engine(
- self,
- parameters=parameters,
- wait_for=wait_for,
- return_type=return_type,
- task_runner=None, # Use the flow's task runner
- mapped=False,
- )
-
- async def _submit_async(
- self,
- parameters: Dict[str, Any],
- flow_run_context: FlowRunContext,
- wait_for: Optional[Iterable[PrefectFuture]],
- return_state: bool,
- ):
- from prefect.new_task_engine import run_task_async
-
  task_runner = flow_run_context.task_runner
-
- task_run = await self.create_run(
- client=flow_run_context.client,
- flow_run_context=flow_run_context,
- parameters=parameters,
- wait_for=wait_for,
- )
-
- future = PrefectFuture(
- name=task_run.name,
- key=uuid4(),
- task_runner=task_runner,
- asynchronous=(self.isasync and flow_run_context.flow.isasync),
- )
- future.task_run = task_run
- flow_run_context.task_run_futures.append(future)
- await task_runner.submit(
- key=future.key,
- call=partial(
- run_task_async,
- task=self,
- task_run=task_run,
- parameters=parameters,
- wait_for=wait_for,
- return_type="state",
- ),
- )
- # TODO: I don't like this. Can we move responsibility for creating the future
- # and setting this anyio.Event to the task runner?
- future._submitted.set()
-
+ future = task_runner.submit(self, parameters, wait_for)
  if return_state:
- return await future.wait()
+ future.wait()
+ return future.state
  else:
  return future

@@ -993,32 +891,24 @@ class Task(Generic[P, R]):
  self: "Task[P, NoReturn]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> List[PrefectFuture[None, Sync]]:
+ ) -> List[PrefectFuture]:
  # `NoReturn` matches if a type can't be inferred for the function which stops a
  # sync function from matching the `Coroutine` overload
  ...

- @overload
- def map(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> Awaitable[List[PrefectFuture[T, Async]]]:
- ...
-
  @overload
  def map(
  self: "Task[P, T]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> List[PrefectFuture[T, Sync]]:
+ ) -> List[PrefectFuture]:
  ...

  @overload
  def map(
  self: "Task[P, T]",
- *args: P.args,
  return_state: Literal[True],
+ *args: P.args,
  **kwargs: P.kwargs,
  ) -> List[State[T]]:
  ...
@@ -1028,8 +918,9 @@ class Task(Generic[P, R]):
  *args: Any,
  return_state: bool = False,
  wait_for: Optional[Iterable[PrefectFuture]] = None,
+ deferred: bool = False,
  **kwargs: Any,
- ) -> Any:
+ ):
  """
  Submit a mapped run of the task to a worker.

@@ -1044,9 +935,7 @@ class Task(Generic[P, R]):
  backing API and submit the task runs to the flow's task runner. This
  call blocks if given a future as input while the future is resolved. It
  also blocks while the tasks are being submitted, once they are
- submitted, the flow function will continue executing. However, note
- that the `SequentialTaskRunner` does not implement parallel execution
- for sync tasks and they are fully resolved on submission.
+ submitted, the flow function will continue executing.

  Args:
  *args: Iterable and static arguments to run the tasks with
@@ -1143,12 +1032,16 @@ class Task(Generic[P, R]):
  [[11, 21], [12, 22], [13, 23]]
  """

- from prefect.engine import begin_task_map, enter_task_run_engine
+ from prefect.task_runners import TaskRunner
+ from prefect.utilities.visualization import (
+ VisualizationUnsupportedError,
+ get_task_viz_tracker,
+ )

  # Convert the call args/kwargs to a parameter dict; do not apply defaults
  # since they should not be mapped over
  parameters = get_call_parameters(self.fn, args, kwargs, apply_defaults=False)
- return_type = "state" if return_state else "future"
+ flow_run_context = FlowRunContext.get()

  task_viz_tracker = get_task_viz_tracker()
  if task_viz_tracker:
@@ -1156,35 +1049,162 @@ class Task(Generic[P, R]):
  "`task.map()` is not currently supported by `flow.visualize()`"
  )

- if (
- PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
- and not FlowRunContext.get()
- ):
- map_call = create_call(
- begin_task_map,
- task=self,
+ if deferred:
+ parameters_list = expand_mapping_parameters(self.fn, parameters)
+ futures = [
+ self.apply_async(kwargs=parameters, wait_for=wait_for)
+ for parameters in parameters_list
+ ]
+ elif task_runner := getattr(flow_run_context, "task_runner", None):
+ assert isinstance(task_runner, TaskRunner)
+ futures = task_runner.map(self, parameters, wait_for)
+ else:
+ raise RuntimeError(
+ "Unable to determine task runner to use for mapped task runs. If"
+ " you are mapping a task outside of a flow, please provide"
+ " `deferred=True` to submit the mapped task runs for deferred"
+ " execution."
+ )
+ if return_state:
+ states = []
+ for future in futures:
+ future.wait()
+ states.append(future.state)
+ return states
+ else:
+ return futures
+
+ def apply_async(
+ self,
+ args: Optional[Tuple[Any, ...]] = None,
+ kwargs: Optional[Dict[str, Any]] = None,
+ wait_for: Optional[Iterable[PrefectFuture]] = None,
+ dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+ ) -> PrefectDistributedFuture:
+ """
+ Create a pending task run for a task worker to execute.
+
+ Args:
+ args: Arguments to run the task with
+ kwargs: Keyword arguments to run the task with
+
+ Returns:
+ A PrefectDistributedFuture object representing the pending task run
+
+ Examples:
+
+ Define a task
+
+ >>> from prefect import task
+ >>> @task
+ >>> def my_task(name: str = "world"):
+ >>> return f"hello {name}"
+
+ Create a pending task run for the task
+
+ >>> from prefect import flow
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.apply_async(("marvin",))
+
+ Wait for a task to finish
+
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.apply_async(("marvin",)).wait()
+
+
+ >>> @flow
+ >>> def my_flow():
+ >>> print(my_task.apply_async(("marvin",)).result())
+ >>>
+ >>> my_flow()
+ hello marvin
+
+ TODO: Enforce ordering between tasks that do not exchange data
+ >>> @task
+ >>> def task_1():
+ >>> pass
+ >>>
+ >>> @task
+ >>> def task_2():
+ >>> pass
+ >>>
+ >>> @flow
+ >>> def my_flow():
+ >>> x = task_1.apply_async()
+ >>>
+ >>> # task 2 will wait for task_1 to complete
+ >>> y = task_2.apply_async(wait_for=[x])
+
+ """
+ from prefect.utilities.visualization import (
+ VisualizationUnsupportedError,
+ get_task_viz_tracker,
+ )
+
+ task_viz_tracker = get_task_viz_tracker()
+ if task_viz_tracker:
+ raise VisualizationUnsupportedError(
+ "`task.apply_async()` is not currently supported by `flow.visualize()`"
+ )
+ args = args or ()
+ kwargs = kwargs or {}
+
+ # Convert the call args/kwargs to a parameter dict
+ parameters = get_call_parameters(self.fn, args, kwargs)
+
+ task_run = run_coro_as_sync(
+ self.create_run(
  parameters=parameters,
- flow_run_context=None,
+ deferred=True,
  wait_for=wait_for,
- return_type=return_type,
- task_runner=None,
- autonomous=True,
+ extra_task_inputs=dependencies,
  )
- if self.isasync:
- return from_async.wait_for_call_in_loop_thread(map_call)
- else:
- return from_sync.wait_for_call_in_loop_thread(map_call)
-
- return enter_task_run_engine(
- self,
- parameters=parameters,
- wait_for=wait_for,
- return_type=return_type,
- task_runner=None,
- mapped=True,
  )
+ return PrefectDistributedFuture(task_run_id=task_run.id)

- def serve(self, task_runner: Optional[BaseTaskRunner] = None) -> "Task":
+ def delay(self, *args: P.args, **kwargs: P.kwargs) -> PrefectDistributedFuture:
+ """
+ An alias for `apply_async` with simpler calling semantics.
+
+ Avoids having to use explicit "args" and "kwargs" arguments. Arguments
+ will pass through as-is to the task.
+
+ Examples:
+
+ Define a task
+
+ >>> from prefect import task
+ >>> @task
+ >>> def my_task(name: str = "world"):
+ >>> return f"hello {name}"
+
+ Create a pending task run for the task
+
+ >>> from prefect import flow
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.delay("marvin")
+
+ Wait for a task to finish
+
+ >>> @flow
+ >>> def my_flow():
+ >>> my_task.delay("marvin").wait()
+
+ Use the result from a task in a flow
+
+ >>> @flow
+ >>> def my_flow():
+ >>> print(my_task.delay("marvin").result())
+ >>>
+ >>> my_flow()
+ hello marvin
+ """
+ return self.apply_async(args=args, kwargs=kwargs)
+
+ def serve(self) -> "Task":
  """Serve the task using the provided task runner. This method is used to
  establish a websocket connection with the Prefect server and listen for
  submitted task runs to execute.
@@ -1201,16 +1221,9 @@ class Task(Generic[P, R]):

  >>> my_task.serve()
  """
+ from prefect.task_worker import serve

- if not PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING:
- raise ValueError(
- "Task's `serve` method is an experimental feature and must be enabled with "
- "`prefect config set PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING=True`"
- )
-
- from prefect.task_server import serve
-
- serve(self, task_runner=task_runner)
+ serve(self)


  @overload
@@ -1225,6 +1238,7 @@ def task(
  description: str = None,
  tags: Iterable[str] = None,
  version: str = None,
+ cache_policy: CachePolicy = NotSet,
  cache_key_fn: Callable[["TaskRunContext", Dict[str, Any]], Optional[str]] = None,
  cache_expiration: datetime.timedelta = None,
  task_run_name: Optional[Union[Callable[[], str], str]] = None,
@@ -1259,6 +1273,7 @@ def task(
  description: str = None,
  tags: Iterable[str] = None,
  version: str = None,
+ cache_policy: CachePolicy = NotSet,
  cache_key_fn: Callable[["TaskRunContext", Dict[str, Any]], Optional[str]] = None,
  cache_expiration: datetime.timedelta = None,
  task_run_name: Optional[Union[Callable[[], str], str]] = None,
@@ -1401,6 +1416,7 @@ def task(
  description=description,
  tags=tags,
  version=version,
+ cache_policy=cache_policy,
  cache_key_fn=cache_key_fn,
  cache_expiration=cache_expiration,
  task_run_name=task_run_name,
@@ -1430,6 +1446,7 @@ def task(
  description=description,
  tags=tags,
  version=version,
+ cache_policy=cache_policy,
  cache_key_fn=cache_key_fn,
  cache_expiration=cache_expiration,
  task_run_name=task_run_name,
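Taken together with the renamed prefect/task_worker.py module (formerly task_server.py in the file list above), the apply_async / delay / serve additions outline a deferred, background-task workflow in 3.0. A rough usage sketch derived from the docstrings and error messages in this diff, assuming a Prefect API is configured and a task worker process is available; it is illustrative only, not part of the package:

    from prefect import task

    @task
    def my_task(name: str = "world") -> str:
        return f"hello {name}"

    # outside a flow, submit() now raises; deferred execution is used instead
    future = my_task.delay("marvin")   # returns a PrefectDistributedFuture
    print(future.result())             # blocks until a task worker executes the run

    # in a separate process, serve the task so a worker picks up deferred runs:
    # my_task.serve()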